libervia-backend: comparison of libervia/backend/memory/memory.py @ 4270:0d7bb4df2343
Reformatted code base using black.
author | Goffi <goffi@goffi.org>
date | Wed, 19 Jun 2024 18:44:57 +0200
parents | 64a85ce8be70
children |
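
The hunks below are purely mechanical: Black rewrites the file to its default style (88-character lines, double-quoted strings, spaces around binary operators, and one argument per line with a trailing comma once a signature or call no longer fits on one line). As a minimal sketch of what produces this kind of diff, not code taken from memory.py itself, the same rewrites can be reproduced with Black's Python API (black.format_str and black.Mode are part of its documented API; the sample source below is invented):

    # Illustrative only: SAMPLE is made up, but mirrors the patterns changed below.
    import black

    SAMPLE = (
        "def history_get(self, from_jid: jid.JID|None, to_jid: jid.JID|None,\n"
        "        limit: int = C.HISTORY_LIMIT_NONE, profile: str = C.PROF_KEY_NONE):\n"
        "    file_data['affiliation'] = 'owner'\n"
    )

    # Black's defaults (line length 88, double quotes, trailing commas in exploded
    # argument lists) give the style seen in the right-hand column of this comparison.
    print(black.format_str(SAMPLE, mode=black.Mode()))

From a checkout, running python -m black . at the repository root applies the same defaults tree-wide; any project-specific options would come from the repository's pyproject.toml, which is not shown here.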
4269:64a85ce8be70 | 4270:0d7bb4df2343 |
---|---|
153 | 153 |
154 def __setitem__(self, key, value): | 154 def __setitem__(self, key, value): |
155 raise NotImplementedError("You need do use new_session to create a session") | 155 raise NotImplementedError("You need do use new_session to create a session") |
156 | 156 |
157 def __delitem__(self, session_id): | 157 def __delitem__(self, session_id): |
158 """ delete the session data """ | 158 """delete the session data""" |
159 self._purge_session(session_id) | 159 self._purge_session(session_id) |
160 | 160 |
161 def keys(self): | 161 def keys(self): |
162 return list(self._sessions.keys()) | 162 return list(self._sessions.keys()) |
163 | 163 |
250 self.config = tools_config.parse_main_conf(log_filenames=True) | 250 self.config = tools_config.parse_main_conf(log_filenames=True) |
251 self._cache_path = Path(self.config_get("", "local_dir"), C.CACHE_DIR) | 251 self._cache_path = Path(self.config_get("", "local_dir"), C.CACHE_DIR) |
252 self.admins = self.config_get("", "admins_list", []) | 252 self.admins = self.config_get("", "admins_list", []) |
253 self.admin_jids = set() | 253 self.admin_jids = set() |
254 self._file_path_lock = defer.DeferredLock() | 254 self._file_path_lock = defer.DeferredLock() |
255 | |
256 | 255 |
257 async def initialise(self): | 256 async def initialise(self): |
258 self.storage = Storage() | 257 self.storage = Storage() |
259 await self.storage.initialise() | 258 await self.storage.initialise() |
260 PersistentDict.storage = self.storage | 259 PersistentDict.storage = self.storage |
279 except RuntimeError: | 278 except RuntimeError: |
280 log.warning(f"Invalid JID for admin {admin}: {admin_jid_s}") | 279 log.warning(f"Invalid JID for admin {admin}: {admin_jid_s}") |
281 else: | 280 else: |
282 self.admin_jids.add(admin_jid) | 281 self.admin_jids.add(admin_jid) |
283 | 282 |
284 | |
285 ## Configuration ## | 283 ## Configuration ## |
286 | 284 |
287 def config_get( | 285 def config_get( |
288 self, | 286 self, section: str | None, name: str, default: Any = None |
289 section: str|None, | 287 ) -> str | list | dict: |
290 name: str, | |
291 default: Any = None | |
292 ) -> str|list|dict: | |
293 """Get the main configuration option | 288 """Get the main configuration option |
294 | 289 |
295 @param section: section of the config file (None or '' for DEFAULT) | 290 @param section: section of the config file (None or '' for DEFAULT) |
296 @param name: name of the option | 291 @param name: name of the option |
297 @param default: value to use if not found | 292 @param default: value to use if not found |
346 return self.params.load_ind_params(profile) | 341 return self.params.load_ind_params(profile) |
347 | 342 |
348 ## Profiles/Sessions management ## | 343 ## Profiles/Sessions management ## |
349 | 344 |
350 def start_session(self, password, profile): | 345 def start_session(self, password, profile): |
351 """"Iniatialise session for a profile | 346 """ "Iniatialise session for a profile |
352 | 347 |
353 @param password(unicode): profile session password | 348 @param password(unicode): profile session password |
354 or empty string is no password is set | 349 or empty string is no password is set |
355 @param profile: %(doc_profile)s | 350 @param profile: %(doc_profile)s |
356 @raise exceptions.ProfileUnknownError if profile doesn't exists | 351 @raise exceptions.ProfileUnknownError if profile doesn't exists |
373 # if there is a value at this point in self._entities_cache, | 368 # if there is a value at this point in self._entities_cache, |
374 # it is the load_individual_params Deferred, the session is starting | 369 # it is the load_individual_params Deferred, the session is starting |
375 session_d = self._entities_cache[profile] | 370 session_d = self._entities_cache[profile] |
376 except KeyError: | 371 except KeyError: |
377 # else we do request the params | 372 # else we do request the params |
378 session_d = self._entities_cache[profile] = self.load_individual_params( | 373 session_d = self._entities_cache[profile] = ( |
379 profile | 374 self.load_individual_params(profile) |
380 ) | 375 ) |
381 session_d.addCallback(create_session) | 376 session_d.addCallback(create_session) |
382 finally: | 377 finally: |
383 return session_d | 378 return session_d |
384 | 379 |
432 sat_cipher = await self.param_get_a_async( | 427 sat_cipher = await self.param_get_a_async( |
433 C.PROFILE_PASS_PATH[1], C.PROFILE_PASS_PATH[0], profile_key=profile | 428 C.PROFILE_PASS_PATH[1], C.PROFILE_PASS_PATH[0], profile_key=profile |
434 ) | 429 ) |
435 valid = PasswordHasher.verify(password, sat_cipher) | 430 valid = PasswordHasher.verify(password, sat_cipher) |
436 if not valid: | 431 if not valid: |
437 log.warning(_("Authentication failure of profile {profile}").format( | 432 log.warning( |
438 profile=profile)) | 433 _("Authentication failure of profile {profile}").format(profile=profile) |
434 ) | |
439 raise exceptions.PasswordError("The provided profile password doesn't match.") | 435 raise exceptions.PasswordError("The provided profile password doesn't match.") |
440 return await self.new_auth_session(password, profile) | 436 return await self.new_auth_session(password, profile) |
441 | 437 |
442 async def new_auth_session(self, key, profile): | 438 async def new_auth_session(self, key, profile): |
443 """Start a new session for the authenticated profile. | 439 """Start a new session for the authenticated profile. |
465 self.params.purge_profile(profile) | 461 self.params.purge_profile(profile) |
466 try: | 462 try: |
467 del self._entities_cache[profile] | 463 del self._entities_cache[profile] |
468 except KeyError: | 464 except KeyError: |
469 log.error( | 465 log.error( |
470 _( | 466 _("Trying to purge roster status cache for a profile not in memory: [%s]") |
471 "Trying to purge roster status cache for a profile not in memory: [%s]" | |
472 ) | |
473 % profile | 467 % profile |
474 ) | 468 ) |
475 | 469 |
476 def get_profiles_list(self, clients=True, components=False): | 470 def get_profiles_list(self, clients=True, components=False): |
477 """retrieve profiles list | 471 """retrieve profiles list |
552 | 546 |
553 def init_personal_key(__): | 547 def init_personal_key(__): |
554 # be sure to call this after checking that the profile doesn't exist yet | 548 # be sure to call this after checking that the profile doesn't exist yet |
555 | 549 |
556 # generated once for all and saved in a PersistentDict | 550 # generated once for all and saved in a PersistentDict |
557 personal_key = BlockCipher.get_random_key( | 551 personal_key = BlockCipher.get_random_key(base64=True).decode("utf-8") |
558 base64=True | |
559 ).decode('utf-8') | |
560 self.auth_sessions.new_session( | 552 self.auth_sessions.new_session( |
561 {C.MEMORY_CRYPTO_KEY: personal_key}, profile=name | 553 {C.MEMORY_CRYPTO_KEY: personal_key}, profile=name |
562 ) # will be encrypted by param_set | 554 ) # will be encrypted by param_set |
563 | 555 |
564 def start_fake_session(__): | 556 def start_fake_session(__): |
624 def add_to_history(self, client, data): | 616 def add_to_history(self, client, data): |
625 return self.storage.add_to_history(data, client.profile) | 617 return self.storage.add_to_history(data, client.profile) |
626 | 618 |
627 def _history_get_serialise(self, history_data): | 619 def _history_get_serialise(self, history_data): |
628 return [ | 620 return [ |
629 (uid, timestamp, from_jid, to_jid, message, subject, mess_type, | 621 ( |
630 data_format.serialise(extra)) for uid, timestamp, from_jid, to_jid, message, | 622 uid, |
631 subject, mess_type, extra in history_data | 623 timestamp, |
624 from_jid, | |
625 to_jid, | |
626 message, | |
627 subject, | |
628 mess_type, | |
629 data_format.serialise(extra), | |
630 ) | |
631 for uid, timestamp, from_jid, to_jid, message, subject, mess_type, extra in history_data | |
632 ] | 632 ] |
633 | 633 |
634 def _history_get(self, from_jid_s, to_jid_s, limit=C.HISTORY_LIMIT_NONE, between=True, | 634 def _history_get( |
635 filters=None, profile=C.PROF_KEY_NONE): | 635 self, |
636 from_jid_s, | |
637 to_jid_s, | |
638 limit=C.HISTORY_LIMIT_NONE, | |
639 between=True, | |
640 filters=None, | |
641 profile=C.PROF_KEY_NONE, | |
642 ): | |
636 from_jid = jid.JID(from_jid_s) if from_jid_s else None | 643 from_jid = jid.JID(from_jid_s) if from_jid_s else None |
637 to_jid = jid.JID(to_jid_s) if to_jid_s else None | 644 to_jid = jid.JID(to_jid_s) if to_jid_s else None |
638 d = self.history_get( | 645 d = self.history_get(from_jid, to_jid, limit, between, filters, profile) |
639 from_jid, to_jid, limit, between, filters, profile | |
640 ) | |
641 d.addCallback(self._history_get_serialise) | 646 d.addCallback(self._history_get_serialise) |
642 return d | 647 return d |
643 | 648 |
644 def history_get( | 649 def history_get( |
645 self, | 650 self, |
646 from_jid: jid.JID|None, | 651 from_jid: jid.JID | None, |
647 to_jid: jid.JID|None, | 652 to_jid: jid.JID | None, |
648 limit: int = C.HISTORY_LIMIT_NONE, | 653 limit: int = C.HISTORY_LIMIT_NONE, |
649 between: bool = True, | 654 between: bool = True, |
650 filters: dict[str, str]|None = None, | 655 filters: dict[str, str] | None = None, |
651 profile: str = C.PROF_KEY_NONE | 656 profile: str = C.PROF_KEY_NONE, |
652 ) -> defer.Deferred[list]: | 657 ) -> defer.Deferred[list]: |
653 """Retrieve messages in history | 658 """Retrieve messages in history |
654 | 659 |
655 @param from_jid: source JID (full, or bare for catchall) | 660 @param from_jid: source JID (full, or bare for catchall) |
656 @param to_jid: dest JID (full, or bare for catchall) | 661 @param to_jid: dest JID (full, or bare for catchall) |
669 limit = int(self.param_get_a(C.HISTORY_LIMIT, "General", profile_key=profile)) | 674 limit = int(self.param_get_a(C.HISTORY_LIMIT, "General", profile_key=profile)) |
670 elif limit == C.HISTORY_LIMIT_NONE: | 675 elif limit == C.HISTORY_LIMIT_NONE: |
671 limit = None | 676 limit = None |
672 if limit == 0: | 677 if limit == 0: |
673 return defer.succeed([]) | 678 return defer.succeed([]) |
674 return self.storage.history_get(from_jid, to_jid, limit, between, filters, profile) | 679 return self.storage.history_get( |
680 from_jid, to_jid, limit, between, filters, profile | |
681 ) | |
675 | 682 |
676 ## Statuses ## | 683 ## Statuses ## |
677 | 684 |
678 def _get_presence_statuses(self, profile_key): | 685 def _get_presence_statuses(self, profile_key): |
679 ret = self.presence_statuses_get(profile_key) | 686 ret = self.presence_statuses_get(profile_key) |
712 @param statuses: dictionary of statuses | 719 @param statuses: dictionary of statuses |
713 @param profile_key: %(doc_profile_key)s | 720 @param profile_key: %(doc_profile_key)s |
714 """ | 721 """ |
715 client = self.host.get_client(profile_key) | 722 client = self.host.get_client(profile_key) |
716 presence_data = PresenceTuple(show, priority, statuses) | 723 presence_data = PresenceTuple(show, priority, statuses) |
717 self.update_entity_data( | 724 self.update_entity_data(client, entity_jid, "presence", presence_data) |
718 client, entity_jid, "presence", presence_data | |
719 ) | |
720 if entity_jid.resource and show != C.PRESENCE_UNAVAILABLE: | 725 if entity_jid.resource and show != C.PRESENCE_UNAVAILABLE: |
721 # If a resource is available, bare jid should not have presence information | 726 # If a resource is available, bare jid should not have presence information |
722 try: | 727 try: |
723 self.del_entity_datum(client, entity_jid.userhostJID(), "presence") | 728 self.del_entity_datum(client, entity_jid.userhostJID(), "presence") |
724 except (KeyError, exceptions.UnknownEntityError): | 729 except (KeyError, exceptions.UnknownEntityError): |
741 @raise ValueError: entity_jid has a resource | 746 @raise ValueError: entity_jid has a resource |
742 """ | 747 """ |
743 # FIXME: is there a need to keep cache data for resources which are not connected anymore? | 748 # FIXME: is there a need to keep cache data for resources which are not connected anymore? |
744 if entity_jid.resource: | 749 if entity_jid.resource: |
745 raise ValueError( | 750 raise ValueError( |
746 "get_all_resources must be used with a bare jid (got {})".format(entity_jid) | 751 "get_all_resources must be used with a bare jid (got {})".format( |
752 entity_jid | |
753 ) | |
747 ) | 754 ) |
748 profile_cache = self._get_profile_cache(client) | 755 profile_cache = self._get_profile_cache(client) |
749 try: | 756 try: |
750 entity_data = profile_cache[entity_jid.userhostJID()] | 757 entity_data = profile_cache[entity_jid.userhostJID()] |
751 except KeyError: | 758 except KeyError: |
789 @param entity_jid: bare entity jid | 796 @param entity_jid: bare entity jid |
790 @return (unicode): main resource or None | 797 @return (unicode): main resource or None |
791 """ | 798 """ |
792 if entity_jid.resource: | 799 if entity_jid.resource: |
793 raise ValueError( | 800 raise ValueError( |
794 "main_resource_get must be used with a bare jid (got {})".format(entity_jid) | 801 "main_resource_get must be used with a bare jid (got {})".format( |
802 entity_jid | |
803 ) | |
795 ) | 804 ) |
796 try: | 805 try: |
797 if self.host.plugins["XEP-0045"].is_joined_room(client, entity_jid): | 806 if self.host.plugins["XEP-0045"].is_joined_room(client, entity_jid): |
798 return None # MUC rooms have no main resource | 807 return None # MUC rooms have no main resource |
799 except KeyError: # plugin not found | 808 except KeyError: # plugin not found |
855 continue | 864 continue |
856 full_jid = copy.copy(bare_jid) | 865 full_jid = copy.copy(bare_jid) |
857 full_jid.resource = resource | 866 full_jid.resource = resource |
858 yield full_jid | 867 yield full_jid |
859 | 868 |
860 def update_entity_data( | 869 def update_entity_data(self, client, entity_jid, key, value, silent=False): |
861 self, client, entity_jid, key, value, silent=False | |
862 ): | |
863 """Set a misc data for an entity | 870 """Set a misc data for an entity |
864 | 871 |
865 If key was registered with set_signal_on_update, a signal will be sent to frontends | 872 If key was registered with set_signal_on_update, a signal will be sent to frontends |
866 @param entity_jid: JID of the entity, C.ENTITY_ALL_RESOURCES for all resources of | 873 @param entity_jid: JID of the entity, C.ENTITY_ALL_RESOURCES for all resources of |
867 all entities, C.ENTITY_ALL for all entities (all resources + bare jids) | 874 all entities, C.ENTITY_ALL for all entities (all resources + bare jids) |
882 ) | 889 ) |
883 | 890 |
884 entity_data[key] = value | 891 entity_data[key] = value |
885 if key in self._key_signals and not silent: | 892 if key in self._key_signals and not silent: |
886 self.host.bridge.entity_data_updated( | 893 self.host.bridge.entity_data_updated( |
887 jid_.full(), | 894 jid_.full(), key, data_format.serialise(value), client.profile |
888 key, | |
889 data_format.serialise(value), | |
890 client.profile | |
891 ) | 895 ) |
892 | 896 |
893 def del_entity_datum(self, client, entity_jid, key): | 897 def del_entity_datum(self, client, entity_jid, key): |
894 """Delete a data for an entity | 898 """Delete a data for an entity |
895 | 899 |
908 | 912 |
909 for jid_ in entities: | 913 for jid_ in entities: |
910 try: | 914 try: |
911 entity_data = profile_cache[jid_.userhostJID()][jid_.resource] | 915 entity_data = profile_cache[jid_.userhostJID()][jid_.resource] |
912 except KeyError: | 916 except KeyError: |
913 raise exceptions.UnknownEntityError( | 917 raise exceptions.UnknownEntityError("Entity {} not in cache".format(jid_)) |
914 "Entity {} not in cache".format(jid_) | |
915 ) | |
916 try: | 918 try: |
917 del entity_data[key] | 919 del entity_data[key] |
918 except KeyError as e: | 920 except KeyError as e: |
919 if entity_jid in (C.ENTITY_ALL_RESOURCES, C.ENTITY_ALL): | 921 if entity_jid in (C.ENTITY_ALL_RESOURCES, C.ENTITY_ALL): |
920 continue # we ignore KeyError when deleting keys from several entities | 922 continue # we ignore KeyError when deleting keys from several entities |
925 client = self.host.get_client(profile_key) | 927 client = self.host.get_client(profile_key) |
926 ret = self.entities_data_get( | 928 ret = self.entities_data_get( |
927 client, [jid.JID(jid_) for jid_ in entities_jids], keys_list | 929 client, [jid.JID(jid_) for jid_ in entities_jids], keys_list |
928 ) | 930 ) |
929 return { | 931 return { |
930 jid_.full(): {k: data_format.serialise(v) for k,v in data.items()} | 932 jid_.full(): {k: data_format.serialise(v) for k, v in data.items()} |
931 for jid_, data in ret.items() | 933 for jid_, data in ret.items() |
932 } | 934 } |
933 | 935 |
934 def entities_data_get(self, client, entities_jids, keys_list=None): | 936 def entities_data_get(self, client, entities_jids, keys_list=None): |
935 """Get a list of cached values for several entities at once | 937 """Get a list of cached values for several entities at once |
978 | 980 |
979 return ret_data | 981 return ret_data |
980 | 982 |
981 def _get_entity_data(self, entity_jid_s, keys_list=None, profile=C.PROF_KEY_NONE): | 983 def _get_entity_data(self, entity_jid_s, keys_list=None, profile=C.PROF_KEY_NONE): |
982 return self.entity_data_get( | 984 return self.entity_data_get( |
983 self.host.get_client(profile), jid.JID(entity_jid_s), keys_list) | 985 self.host.get_client(profile), jid.JID(entity_jid_s), keys_list |
986 ) | |
984 | 987 |
985 def entity_data_get(self, client, entity_jid, keys_list=None): | 988 def entity_data_get(self, client, entity_jid, keys_list=None): |
986 """Get a list of cached values for entity | 989 """Get a list of cached values for entity |
987 | 990 |
988 @param entity_jid: JID of the entity | 991 @param entity_jid: JID of the entity |
997 profile_cache = self._get_profile_cache(client) | 1000 profile_cache = self._get_profile_cache(client) |
998 try: | 1001 try: |
999 entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] | 1002 entity_data = profile_cache[entity_jid.userhostJID()][entity_jid.resource] |
1000 except KeyError: | 1003 except KeyError: |
1001 raise exceptions.UnknownEntityError( | 1004 raise exceptions.UnknownEntityError( |
1002 "Entity {} not in cache (was requesting {})".format( | 1005 "Entity {} not in cache (was requesting {})".format(entity_jid, keys_list) |
1003 entity_jid, keys_list | |
1004 ) | |
1005 ) | 1006 ) |
1006 if keys_list is None: | 1007 if keys_list is None: |
1007 return entity_data | 1008 return entity_data |
1008 | 1009 |
1009 return {key: entity_data[key] for key in keys_list if key in entity_data} | 1010 return {key: entity_data[key] for key in keys_list if key in entity_data} |
1144 | 1145 |
1145 return self.subscriptions[profile] | 1146 return self.subscriptions[profile] |
1146 | 1147 |
1147 ## Parameters ## | 1148 ## Parameters ## |
1148 | 1149 |
1149 def get_string_param_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE): | 1150 def get_string_param_a( |
1151 self, name, category, attr="value", profile_key=C.PROF_KEY_NONE | |
1152 ): | |
1150 return self.params.get_string_param_a(name, category, attr, profile_key) | 1153 return self.params.get_string_param_a(name, category, attr, profile_key) |
1151 | 1154 |
1152 def param_get_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE): | 1155 def param_get_a(self, name, category, attr="value", profile_key=C.PROF_KEY_NONE): |
1153 return self.params.param_get_a(name, category, attr, profile_key=profile_key) | 1156 return self.params.param_get_a(name, category, attr, profile_key=profile_key) |
1154 | 1157 |
1170 return self.params._get_params_values_from_category( | 1173 return self.params._get_params_values_from_category( |
1171 category, security_limit, app, extra_s, profile_key | 1174 category, security_limit, app, extra_s, profile_key |
1172 ) | 1175 ) |
1173 | 1176 |
1174 def async_get_string_param_a( | 1177 def async_get_string_param_a( |
1175 self, name, category, attribute="value", security_limit=C.NO_SECURITY_LIMIT, | 1178 self, |
1176 profile_key=C.PROF_KEY_NONE): | 1179 name, |
1180 category, | |
1181 attribute="value", | |
1182 security_limit=C.NO_SECURITY_LIMIT, | |
1183 profile_key=C.PROF_KEY_NONE, | |
1184 ): | |
1177 | 1185 |
1178 profile = self.get_profile_name(profile_key) | 1186 profile = self.get_profile_name(profile_key) |
1179 return defer.ensureDeferred(self.params.async_get_string_param_a( | 1187 return defer.ensureDeferred( |
1180 name, category, attribute, security_limit, profile | 1188 self.params.async_get_string_param_a( |
1181 )) | 1189 name, category, attribute, security_limit, profile |
1190 ) | |
1191 ) | |
1182 | 1192 |
1183 def _get_params_ui(self, security_limit, app, extra_s, profile_key): | 1193 def _get_params_ui(self, security_limit, app, extra_s, profile_key): |
1184 return self.params._get_params_ui(security_limit, app, extra_s, profile_key) | 1194 return self.params._get_params_ui(security_limit, app, extra_s, profile_key) |
1185 | 1195 |
1186 def params_categories_get(self): | 1196 def params_categories_get(self): |
1209 | 1219 |
1210 def _private_data_set(self, namespace, key, data_s, profile_key): | 1220 def _private_data_set(self, namespace, key, data_s, profile_key): |
1211 client = self.host.get_client(profile_key) | 1221 client = self.host.get_client(profile_key) |
1212 # we accept any type | 1222 # we accept any type |
1213 data = data_format.deserialise(data_s, type_check=None) | 1223 data = data_format.deserialise(data_s, type_check=None) |
1214 return defer.ensureDeferred(self.storage.set_private_value( | 1224 return defer.ensureDeferred( |
1215 namespace, key, data, binary=True, profile=client.profile)) | 1225 self.storage.set_private_value( |
1226 namespace, key, data, binary=True, profile=client.profile | |
1227 ) | |
1228 ) | |
1216 | 1229 |
1217 def _private_data_get(self, namespace, key, profile_key): | 1230 def _private_data_get(self, namespace, key, profile_key): |
1218 client = self.host.get_client(profile_key) | 1231 client = self.host.get_client(profile_key) |
1219 d = defer.ensureDeferred( | 1232 d = defer.ensureDeferred( |
1220 self.storage.get_privates( | 1233 self.storage.get_privates( |
1221 namespace, [key], binary=True, profile=client.profile) | 1234 namespace, [key], binary=True, profile=client.profile |
1235 ) | |
1222 ) | 1236 ) |
1223 d.addCallback(lambda data_dict: data_format.serialise(data_dict.get(key))) | 1237 d.addCallback(lambda data_dict: data_format.serialise(data_dict.get(key))) |
1224 return d | 1238 return d |
1225 | 1239 |
1226 def _private_data_delete(self, namespace, key, profile_key): | 1240 def _private_data_delete(self, namespace, key, profile_key): |
1227 client = self.host.get_client(profile_key) | 1241 client = self.host.get_client(profile_key) |
1228 return defer.ensureDeferred(self.storage.del_private_value( | 1242 return defer.ensureDeferred( |
1229 namespace, key, binary=True, profile=client.profile)) | 1243 self.storage.del_private_value( |
1244 namespace, key, binary=True, profile=client.profile | |
1245 ) | |
1246 ) | |
1230 | 1247 |
1231 ## Files ## | 1248 ## Files ## |
1232 | 1249 |
1233 def check_file_permission( | 1250 def check_file_permission( |
1234 self, | 1251 self, |
1235 file_data: dict, | 1252 file_data: dict, |
1236 peer_jid: Optional[jid.JID], | 1253 peer_jid: Optional[jid.JID], |
1237 perms_to_check: Optional[Tuple[str]], | 1254 perms_to_check: Optional[Tuple[str]], |
1238 set_affiliation: bool = False | 1255 set_affiliation: bool = False, |
1239 ) -> None: | 1256 ) -> None: |
1240 """Check that an entity has the right permission on a file | 1257 """Check that an entity has the right permission on a file |
1241 | 1258 |
1242 @param file_data: data of one file, as returned by get_files | 1259 @param file_data: data of one file, as returned by get_files |
1243 @param peer_jid: entity trying to access the file | 1260 @param peer_jid: entity trying to access the file |
1254 if peer_jid is None and perms_to_check is None: | 1271 if peer_jid is None and perms_to_check is None: |
1255 return | 1272 return |
1256 peer_jid = peer_jid.userhostJID() | 1273 peer_jid = peer_jid.userhostJID() |
1257 if peer_jid == file_data["owner"]: | 1274 if peer_jid == file_data["owner"]: |
1258 if set_affiliation: | 1275 if set_affiliation: |
1259 file_data['affiliation'] = 'owner' | 1276 file_data["affiliation"] = "owner" |
1260 # the owner has all rights, nothing to check | 1277 # the owner has all rights, nothing to check |
1261 return | 1278 return |
1262 if not C.ACCESS_PERMS.issuperset(perms_to_check): | 1279 if not C.ACCESS_PERMS.issuperset(perms_to_check): |
1263 raise exceptions.InternalError(_("invalid permission")) | 1280 raise exceptions.InternalError(_("invalid permission")) |
1264 | 1281 |
1272 # No permission is set. | 1289 # No permission is set. |
1273 # If we are in a root file/directory, we deny access | 1290 # If we are in a root file/directory, we deny access |
1274 # otherwise, we use public permission, as the parent directory will | 1291 # otherwise, we use public permission, as the parent directory will |
1275 # block anyway, this avoid to have to recursively change permissions for | 1292 # block anyway, this avoid to have to recursively change permissions for |
1276 # all sub directories/files when modifying a permission | 1293 # all sub directories/files when modifying a permission |
1277 if not file_data.get('parent'): | 1294 if not file_data.get("parent"): |
1278 raise exceptions.PermissionError() | 1295 raise exceptions.PermissionError() |
1279 else: | 1296 else: |
1280 perm_type = C.ACCESS_TYPE_PUBLIC | 1297 perm_type = C.ACCESS_TYPE_PUBLIC |
1281 if perm_type == C.ACCESS_TYPE_PUBLIC: | 1298 if perm_type == C.ACCESS_TYPE_PUBLIC: |
1282 continue | 1299 continue |
1356 return (parent, []) | 1373 return (parent, []) |
1357 | 1374 |
1358 def get_file_affiliations(self, file_data: dict) -> Dict[jid.JID, str]: | 1375 def get_file_affiliations(self, file_data: dict) -> Dict[jid.JID, str]: |
1359 """Convert file access to pubsub like affiliations""" | 1376 """Convert file access to pubsub like affiliations""" |
1360 affiliations = {} | 1377 affiliations = {} |
1361 access_data = file_data['access'] | 1378 access_data = file_data["access"] |
1362 | 1379 |
1363 read_data = access_data.get(C.ACCESS_PERM_READ, {}) | 1380 read_data = access_data.get(C.ACCESS_PERM_READ, {}) |
1364 if read_data.get('type') == C.ACCESS_TYPE_WHITELIST: | 1381 if read_data.get("type") == C.ACCESS_TYPE_WHITELIST: |
1365 for entity_jid_s in read_data['jids']: | 1382 for entity_jid_s in read_data["jids"]: |
1366 entity_jid = jid.JID(entity_jid_s) | 1383 entity_jid = jid.JID(entity_jid_s) |
1367 affiliations[entity_jid] = 'member' | 1384 affiliations[entity_jid] = "member" |
1368 | 1385 |
1369 write_data = access_data.get(C.ACCESS_PERM_WRITE, {}) | 1386 write_data = access_data.get(C.ACCESS_PERM_WRITE, {}) |
1370 if write_data.get('type') == C.ACCESS_TYPE_WHITELIST: | 1387 if write_data.get("type") == C.ACCESS_TYPE_WHITELIST: |
1371 for entity_jid_s in write_data['jids']: | 1388 for entity_jid_s in write_data["jids"]: |
1372 entity_jid = jid.JID(entity_jid_s) | 1389 entity_jid = jid.JID(entity_jid_s) |
1373 affiliations[entity_jid] = 'publisher' | 1390 affiliations[entity_jid] = "publisher" |
1374 | 1391 |
1375 owner = file_data.get('owner') | 1392 owner = file_data.get("owner") |
1376 if owner: | 1393 if owner: |
1377 affiliations[owner] = 'owner' | 1394 affiliations[owner] = "owner" |
1378 | 1395 |
1379 return affiliations | 1396 return affiliations |
1380 | 1397 |
1381 def _set_file_affiliations_update( | 1398 def _set_file_affiliations_update( |
1382 self, | 1399 self, access: dict, file_data: dict, affiliations: Dict[jid.JID, str] |
1383 access: dict, | |
1384 file_data: dict, | |
1385 affiliations: Dict[jid.JID, str] | |
1386 ) -> None: | 1400 ) -> None: |
1387 read_data = access.setdefault(C.ACCESS_PERM_READ, {}) | 1401 read_data = access.setdefault(C.ACCESS_PERM_READ, {}) |
1388 if read_data.get('type') != C.ACCESS_TYPE_WHITELIST: | 1402 if read_data.get("type") != C.ACCESS_TYPE_WHITELIST: |
1389 read_data['type'] = C.ACCESS_TYPE_WHITELIST | 1403 read_data["type"] = C.ACCESS_TYPE_WHITELIST |
1390 if 'jids' not in read_data: | 1404 if "jids" not in read_data: |
1391 read_data['jids'] = [] | 1405 read_data["jids"] = [] |
1392 read_whitelist = read_data['jids'] | 1406 read_whitelist = read_data["jids"] |
1393 write_data = access.setdefault(C.ACCESS_PERM_WRITE, {}) | 1407 write_data = access.setdefault(C.ACCESS_PERM_WRITE, {}) |
1394 if write_data.get('type') != C.ACCESS_TYPE_WHITELIST: | 1408 if write_data.get("type") != C.ACCESS_TYPE_WHITELIST: |
1395 write_data['type'] = C.ACCESS_TYPE_WHITELIST | 1409 write_data["type"] = C.ACCESS_TYPE_WHITELIST |
1396 if 'jids' not in write_data: | 1410 if "jids" not in write_data: |
1397 write_data['jids'] = [] | 1411 write_data["jids"] = [] |
1398 write_whitelist = write_data['jids'] | 1412 write_whitelist = write_data["jids"] |
1399 for entity_jid, affiliation in affiliations.items(): | 1413 for entity_jid, affiliation in affiliations.items(): |
1400 entity_jid_s = entity_jid.full() | 1414 entity_jid_s = entity_jid.full() |
1401 if affiliation == "none": | 1415 if affiliation == "none": |
1402 try: | 1416 try: |
1403 read_whitelist.remove(entity_jid_s) | 1417 read_whitelist.remove(entity_jid_s) |
1426 raise NotImplementedError('"owner" affiliation can\'t be set') | 1440 raise NotImplementedError('"owner" affiliation can\'t be set') |
1427 else: | 1441 else: |
1428 raise ValueError(f"unknown affiliation: {affiliation!r}") | 1442 raise ValueError(f"unknown affiliation: {affiliation!r}") |
1429 | 1443 |
1430 async def set_file_affiliations( | 1444 async def set_file_affiliations( |
1431 self, | 1445 self, client, file_data: dict, affiliations: Dict[jid.JID, str] |
1432 client, | |
1433 file_data: dict, | |
1434 affiliations: Dict[jid.JID, str] | |
1435 ) -> None: | 1446 ) -> None: |
1436 """Apply pubsub like affiliation to file_data | 1447 """Apply pubsub like affiliation to file_data |
1437 | 1448 |
1438 Affiliations are converted to access types, then set in a whitelist. | 1449 Affiliations are converted to access types, then set in a whitelist. |
1439 Affiliation are mapped as follow: | 1450 Affiliation are mapped as follow: |
1440 - "owner" can't be set (for now) | 1451 - "owner" can't be set (for now) |
1441 - "publisher" gives read and write permissions | 1452 - "publisher" gives read and write permissions |
1442 - "member" gives read permission only | 1453 - "member" gives read permission only |
1443 - "none" removes both read and write permissions | 1454 - "none" removes both read and write permissions |
1444 """ | 1455 """ |
1445 file_id = file_data['id'] | 1456 file_id = file_data["id"] |
1446 await self.file_update( | 1457 await self.file_update( |
1447 file_id, | 1458 file_id, |
1448 'access', | 1459 "access", |
1449 update_cb=partial( | 1460 update_cb=partial( |
1450 self._set_file_affiliations_update, | 1461 self._set_file_affiliations_update, |
1451 file_data=file_data, | 1462 file_data=file_data, |
1452 affiliations=affiliations | 1463 affiliations=affiliations, |
1453 ), | 1464 ), |
1454 ) | 1465 ) |
1455 | 1466 |
1456 def _set_file_access_model_update( | 1467 def _set_file_access_model_update( |
1457 self, | 1468 self, access: dict, file_data: dict, access_model: str |
1458 access: dict, | |
1459 file_data: dict, | |
1460 access_model: str | |
1461 ) -> None: | 1469 ) -> None: |
1462 read_data = access.setdefault(C.ACCESS_PERM_READ, {}) | 1470 read_data = access.setdefault(C.ACCESS_PERM_READ, {}) |
1463 if access_model == "open": | 1471 if access_model == "open": |
1464 requested_type = C.ACCESS_TYPE_PUBLIC | 1472 requested_type = C.ACCESS_TYPE_PUBLIC |
1465 elif access_model == "whitelist": | 1473 elif access_model == "whitelist": |
1466 requested_type = C.ACCESS_TYPE_WHITELIST | 1474 requested_type = C.ACCESS_TYPE_WHITELIST |
1467 else: | 1475 else: |
1468 raise ValueError(f"unknown access model: {access_model}") | 1476 raise ValueError(f"unknown access model: {access_model}") |
1469 | 1477 |
1470 read_data['type'] = requested_type | 1478 read_data["type"] = requested_type |
1471 if requested_type == C.ACCESS_TYPE_WHITELIST and 'jids' not in read_data: | 1479 if requested_type == C.ACCESS_TYPE_WHITELIST and "jids" not in read_data: |
1472 read_data['jids'] = [] | 1480 read_data["jids"] = [] |
1473 | 1481 |
1474 async def set_file_access_model( | 1482 async def set_file_access_model( |
1475 self, | 1483 self, |
1476 client, | 1484 client, |
1477 file_data: dict, | 1485 file_data: dict, |
1481 | 1489 |
1482 Only 2 access models are supported so far: | 1490 Only 2 access models are supported so far: |
1483 - "open": set public access to file/dir | 1491 - "open": set public access to file/dir |
1484 - "whitelist": set whitelist to file/dir | 1492 - "whitelist": set whitelist to file/dir |
1485 """ | 1493 """ |
1486 file_id = file_data['id'] | 1494 file_id = file_data["id"] |
1487 await self.file_update( | 1495 await self.file_update( |
1488 file_id, | 1496 file_id, |
1489 'access', | 1497 "access", |
1490 update_cb=partial( | 1498 update_cb=partial( |
1491 self._set_file_access_model_update, | 1499 self._set_file_access_model_update, |
1492 file_data=file_data, | 1500 file_data=file_data, |
1493 access_model=access_model | 1501 access_model=access_model, |
1494 ), | 1502 ), |
1495 ) | 1503 ) |
1496 | 1504 |
1497 def get_files_owner( | 1505 def get_files_owner( |
1498 self, | 1506 self, |
1499 client, | 1507 client, |
1500 owner: Optional[jid.JID], | 1508 owner: Optional[jid.JID], |
1501 peer_jid: Optional[jid.JID], | 1509 peer_jid: Optional[jid.JID], |
1502 file_id: Optional[str] = None, | 1510 file_id: Optional[str] = None, |
1503 parent: Optional[str] = None | 1511 parent: Optional[str] = None, |
1504 ) -> jid.JID: | 1512 ) -> jid.JID: |
1505 """Get owner to use for a file operation | 1513 """Get owner to use for a file operation |
1506 | 1514 |
1507 if owner is not explicitely set, a suitable one will be used (client.jid for | 1515 if owner is not explicitely set, a suitable one will be used (client.jid for |
1508 clients, peer_jid for components). | 1516 clients, peer_jid for components). |
1524 "Owner must be set for component if peer_jid is None" | 1532 "Owner must be set for component if peer_jid is None" |
1525 ) | 1533 ) |
1526 return peer_jid.userhostJID() | 1534 return peer_jid.userhostJID() |
1527 | 1535 |
1528 async def get_files( | 1536 async def get_files( |
1529 self, client, peer_jid, file_id=None, version=None, parent=None, path=None, | 1537 self, |
1530 type_=None, file_hash=None, hash_algo=None, name=None, namespace=None, | 1538 client, |
1531 mime_type=None, public_id=None, owner=None, access=None, projection=None, | 1539 peer_jid, |
1532 unique=False, perms_to_check=(C.ACCESS_PERM_READ,)): | 1540 file_id=None, |
1541 version=None, | |
1542 parent=None, | |
1543 path=None, | |
1544 type_=None, | |
1545 file_hash=None, | |
1546 hash_algo=None, | |
1547 name=None, | |
1548 namespace=None, | |
1549 mime_type=None, | |
1550 public_id=None, | |
1551 owner=None, | |
1552 access=None, | |
1553 projection=None, | |
1554 unique=False, | |
1555 perms_to_check=(C.ACCESS_PERM_READ,), | |
1556 ): | |
1533 """Retrieve files with with given filters | 1557 """Retrieve files with with given filters |
1534 | 1558 |
1535 @param peer_jid(jid.JID, None): jid trying to access the file | 1559 @param peer_jid(jid.JID, None): jid trying to access the file |
1536 needed to check permission. | 1560 needed to check permission. |
1537 Use None to ignore permission (perms_to_check must be None too) | 1561 Use None to ignore permission (perms_to_check must be None too) |
1626 for file_data in to_remove: | 1650 for file_data in to_remove: |
1627 files.remove(file_data) | 1651 files.remove(file_data) |
1628 return files | 1652 return files |
1629 | 1653 |
1630 async def set_file( | 1654 async def set_file( |
1631 self, client, name, file_id=None, version="", parent=None, path=None, | 1655 self, |
1632 type_=C.FILE_TYPE_FILE, file_hash=None, hash_algo=None, size=None, | 1656 client, |
1633 namespace=None, mime_type=None, public_id=None, created=None, modified=None, | 1657 name, |
1634 owner=None, access=None, extra=None, peer_jid=None, | 1658 file_id=None, |
1635 perms_to_check=(C.ACCESS_PERM_WRITE,) | 1659 version="", |
1660 parent=None, | |
1661 path=None, | |
1662 type_=C.FILE_TYPE_FILE, | |
1663 file_hash=None, | |
1664 hash_algo=None, | |
1665 size=None, | |
1666 namespace=None, | |
1667 mime_type=None, | |
1668 public_id=None, | |
1669 created=None, | |
1670 modified=None, | |
1671 owner=None, | |
1672 access=None, | |
1673 extra=None, | |
1674 peer_jid=None, | |
1675 perms_to_check=(C.ACCESS_PERM_WRITE,), | |
1636 ): | 1676 ): |
1637 """Set a file metadata | 1677 """Set a file metadata |
1638 | 1678 |
1639 @param name(unicode): basename of the file | 1679 @param name(unicode): basename of the file |
1640 @param file_id(unicode): unique id of the file | 1680 @param file_id(unicode): unique id of the file |
1692 if mime_type is None: | 1732 if mime_type is None: |
1693 mime_type, __ = mimetypes.guess_type(name) | 1733 mime_type, __ = mimetypes.guess_type(name) |
1694 else: | 1734 else: |
1695 mime_type = mime_type.lower() | 1735 mime_type = mime_type.lower() |
1696 if public_id is not None: | 1736 if public_id is not None: |
1697 assert len(public_id)>0 | 1737 assert len(public_id) > 0 |
1698 if created is None: | 1738 if created is None: |
1699 created = time.time() | 1739 created = time.time() |
1700 if namespace is not None: | 1740 if namespace is not None: |
1701 namespace = namespace.strip() or None | 1741 namespace = namespace.strip() or None |
1702 if type_ == C.FILE_TYPE_DIRECTORY: | 1742 if type_ == C.FILE_TYPE_DIRECTORY: |
1759 access=access, | 1799 access=access, |
1760 extra=extra, | 1800 extra=extra, |
1761 ) | 1801 ) |
1762 | 1802 |
1763 async def file_get_used_space( | 1803 async def file_get_used_space( |
1764 self, | 1804 self, client, peer_jid: jid.JID, owner: Optional[jid.JID] = None |
1765 client, | |
1766 peer_jid: jid.JID, | |
1767 owner: Optional[jid.JID] = None | |
1768 ) -> int: | 1805 ) -> int: |
1769 """Get space taken by all files owned by an entity | 1806 """Get space taken by all files owned by an entity |
1770 | 1807 |
1771 @param peer_jid: entity requesting the size | 1808 @param peer_jid: entity requesting the size |
1772 @param owner: entity owning the file to check. If None, will be determined by | 1809 @param owner: entity owning the file to check. If None, will be determined by |
1795 self, | 1832 self, |
1796 client, | 1833 client, |
1797 peer_jid: jid.JID, | 1834 peer_jid: jid.JID, |
1798 recursive: bool, | 1835 recursive: bool, |
1799 files_path: Path, | 1836 files_path: Path, |
1800 file_data: dict | 1837 file_data: dict, |
1801 ): | 1838 ): |
1802 """Internal method to delete files/directories recursively | 1839 """Internal method to delete files/directories recursively |
1803 | 1840 |
1804 @param peer_jid(jid.JID): entity requesting the deletion (must be owner of files | 1841 @param peer_jid(jid.JID): entity requesting the deletion (must be owner of files |
1805 to delete) | 1842 to delete) |
1806 @param recursive(boolean): True if recursive deletion is needed | 1843 @param recursive(boolean): True if recursive deletion is needed |
1807 @param files_path(unicode): path of the directory containing the actual files | 1844 @param files_path(unicode): path of the directory containing the actual files |
1808 @param file_data(dict): data of the file to delete | 1845 @param file_data(dict): data of the file to delete |
1809 """ | 1846 """ |
1810 if file_data['owner'] != peer_jid: | 1847 if file_data["owner"] != peer_jid: |
1811 raise exceptions.PermissionError( | 1848 raise exceptions.PermissionError( |
1812 "file {file_name} can't be deleted, {peer_jid} is not the owner" | 1849 "file {file_name} can't be deleted, {peer_jid} is not the owner".format( |
1813 .format(file_name=file_data['name'], peer_jid=peer_jid.full())) | 1850 file_name=file_data["name"], peer_jid=peer_jid.full() |
1814 if file_data['type'] == C.FILE_TYPE_DIRECTORY: | 1851 ) |
1815 sub_files = yield self.get_files(client, peer_jid, parent=file_data['id']) | 1852 ) |
1853 if file_data["type"] == C.FILE_TYPE_DIRECTORY: | |
1854 sub_files = yield self.get_files(client, peer_jid, parent=file_data["id"]) | |
1816 if sub_files and not recursive: | 1855 if sub_files and not recursive: |
1817 raise exceptions.DataError(_("Can't delete directory, it is not empty")) | 1856 raise exceptions.DataError(_("Can't delete directory, it is not empty")) |
1818 # we first delete the sub-files | 1857 # we first delete the sub-files |
1819 for sub_file_data in sub_files: | 1858 for sub_file_data in sub_files: |
1820 if sub_file_data['type'] == C.FILE_TYPE_DIRECTORY: | 1859 if sub_file_data["type"] == C.FILE_TYPE_DIRECTORY: |
1821 sub_file_path = files_path / sub_file_data['name'] | 1860 sub_file_path = files_path / sub_file_data["name"] |
1822 else: | 1861 else: |
1823 sub_file_path = files_path | 1862 sub_file_path = files_path |
1824 yield self._delete_file( | 1863 yield self._delete_file( |
1825 client, peer_jid, recursive, sub_file_path, sub_file_data) | 1864 client, peer_jid, recursive, sub_file_path, sub_file_data |
1865 ) | |
1826 # then the directory itself | 1866 # then the directory itself |
1827 yield self.storage.file_delete(file_data['id']) | 1867 yield self.storage.file_delete(file_data["id"]) |
1828 elif file_data['type'] == C.FILE_TYPE_FILE: | 1868 elif file_data["type"] == C.FILE_TYPE_FILE: |
1829 log.info(_("deleting file {name} with hash {file_hash}").format( | 1869 log.info( |
1830 name=file_data['name'], file_hash=file_data['file_hash'])) | 1870 _("deleting file {name} with hash {file_hash}").format( |
1831 yield self.storage.file_delete(file_data['id']) | 1871 name=file_data["name"], file_hash=file_data["file_hash"] |
1872 ) | |
1873 ) | |
1874 yield self.storage.file_delete(file_data["id"]) | |
1832 references = yield self.get_files( | 1875 references = yield self.get_files( |
1833 client, peer_jid, file_hash=file_data['file_hash']) | 1876 client, peer_jid, file_hash=file_data["file_hash"] |
1877 ) | |
1834 if references: | 1878 if references: |
1835 log.debug("there are still references to the file, we keep it") | 1879 log.debug("there are still references to the file, we keep it") |
1836 else: | 1880 else: |
1837 file_path = os.path.join(files_path, file_data['file_hash']) | 1881 file_path = os.path.join(files_path, file_data["file_hash"]) |
1838 log.info(_("no reference left to {file_path}, deleting").format( | 1882 log.info( |
1839 file_path=file_path)) | 1883 _("no reference left to {file_path}, deleting").format( |
1884 file_path=file_path | |
1885 ) | |
1886 ) | |
1840 try: | 1887 try: |
1841 os.unlink(file_path) | 1888 os.unlink(file_path) |
1842 except FileNotFoundError: | 1889 except FileNotFoundError: |
1843 log.error(f"file at {file_path!r} doesn't exist but it was referenced in files database") | 1890 log.error( |
1891 f"file at {file_path!r} doesn't exist but it was referenced in files database" | |
1892 ) | |
1844 else: | 1893 else: |
1845 raise exceptions.InternalError('Unexpected file type: {file_type}' | 1894 raise exceptions.InternalError( |
1846 .format(file_type=file_data['type'])) | 1895 "Unexpected file type: {file_type}".format(file_type=file_data["type"]) |
1896 ) | |
1847 | 1897 |
1848 async def file_delete(self, client, peer_jid, file_id, recursive=False): | 1898 async def file_delete(self, client, peer_jid, file_id, recursive=False): |
1849 """Delete a single file or a directory and all its sub-files | 1899 """Delete a single file or a directory and all its sub-files |
1850 | 1900 |
1851 @param file_id(unicode): id of the file to delete | 1901 @param file_id(unicode): id of the file to delete |
1855 """ | 1905 """ |
1856 # FIXME: we only allow owner of file to delete files for now, but WRITE access | 1906 # FIXME: we only allow owner of file to delete files for now, but WRITE access |
1857 # should be checked too | 1907 # should be checked too |
1858 files_data = await self.get_files(client, peer_jid, file_id) | 1908 files_data = await self.get_files(client, peer_jid, file_id) |
1859 if not files_data: | 1909 if not files_data: |
1860 raise exceptions.NotFound("Can't find the file with id {file_id}".format( | 1910 raise exceptions.NotFound( |
1861 file_id=file_id)) | 1911 "Can't find the file with id {file_id}".format(file_id=file_id) |
1912 ) | |
1862 file_data = files_data[0] | 1913 file_data = files_data[0] |
1863 if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive: | 1914 if file_data["type"] != C.FILE_TYPE_DIRECTORY and recursive: |
1864 raise ValueError("recursive can only be set for directories") | 1915 raise ValueError("recursive can only be set for directories") |
1865 files_path = self.host.get_local_path(None, C.FILES_DIR) | 1916 files_path = self.host.get_local_path(None, C.FILES_DIR) |
1866 await self._delete_file(client, peer_jid, recursive, files_path, file_data) | 1917 await self._delete_file(client, peer_jid, recursive, files_path, file_data) |
1877 """ | 1928 """ |
1878 namespace = namespace.strip().lower() | 1929 namespace = namespace.strip().lower() |
1879 return Path( | 1930 return Path( |
1880 self._cache_path, | 1931 self._cache_path, |
1881 regex.path_escape(namespace), | 1932 regex.path_escape(namespace), |
1882 *(regex.path_escape(a) for a in args) | 1933 *(regex.path_escape(a) for a in args), |
1883 ) | 1934 ) |
1884 | 1935 |
1885 ## Notifications ## | 1936 ## Notifications ## |
1886 | |
1887 | 1937 |
1888 def _add_notification( | 1938 def _add_notification( |
1889 self, | 1939 self, |
1890 type_: str, | 1940 type_: str, |
1891 body_plain: str, | 1941 body_plain: str, |
1894 is_global: bool, | 1944 is_global: bool, |
1895 requires_action: bool, | 1945 requires_action: bool, |
1896 priority: str, | 1946 priority: str, |
1897 expire_at: float, | 1947 expire_at: float, |
1898 extra_s: str, | 1948 extra_s: str, |
1899 profile_key: str | 1949 profile_key: str, |
1900 ) -> defer.Deferred: | 1950 ) -> defer.Deferred: |
1901 client = self.host.get_client(profile_key) | 1951 client = self.host.get_client(profile_key) |
1902 | 1952 |
1903 if not client.is_admin: | 1953 if not client.is_admin: |
1904 raise exceptions.PermissionError("Only admins can add a notification") | 1954 raise exceptions.PermissionError("Only admins can add a notification") |
1905 | 1955 |
1906 try: | 1956 try: |
1907 notification_type = NotificationType[type_] | 1957 notification_type = NotificationType[type_] |
1908 notification_priority = NotificationPriority[priority] | 1958 notification_priority = NotificationPriority[priority] |
1909 except KeyError as e: | 1959 except KeyError as e: |
1910 raise exceptions.DataError( | 1960 raise exceptions.DataError(f"invalid notification type or priority data: {e}") |
1911 f"invalid notification type or priority data: {e}" | |
1912 ) | |
1913 | 1961 |
1914 return defer.ensureDeferred( | 1962 return defer.ensureDeferred( |
1915 self.add_notification( | 1963 self.add_notification( |
1916 client, | 1964 client, |
1917 notification_type, | 1965 notification_type, |
1920 title or None, | 1968 title or None, |
1921 is_global, | 1969 is_global, |
1922 requires_action, | 1970 requires_action, |
1923 notification_priority, | 1971 notification_priority, |
1924 expire_at or None, | 1972 expire_at or None, |
1925 data_format.deserialise(extra_s) | 1973 data_format.deserialise(extra_s), |
1926 ) | 1974 ) |
1927 ) | 1975 ) |
1928 | 1976 |
1929 async def add_notification( | 1977 async def add_notification( |
1930 self, | 1978 self, |
1953 @priority: how urgent the notification is | 2001 @priority: how urgent the notification is |
1954 @param expire_at: expiration timestamp for the notification. | 2002 @param expire_at: expiration timestamp for the notification. |
1955 @param extra: additional data. | 2003 @param extra: additional data. |
1956 """ | 2004 """ |
1957 notification = await self.storage.add_notification( | 2005 notification = await self.storage.add_notification( |
1958 None if is_global else client, type_, body_plain, body_rich, title, | 2006 None if is_global else client, |
1959 requires_action, priority, expire_at, extra | 2007 type_, |
2008 body_plain, | |
2009 body_rich, | |
2010 title, | |
2011 requires_action, | |
2012 priority, | |
2013 expire_at, | |
2014 extra, | |
1960 ) | 2015 ) |
1961 self.host.bridge.notification_new( | 2016 self.host.bridge.notification_new( |
1962 str(notification.id), | 2017 str(notification.id), |
1963 notification.timestamp, | 2018 notification.timestamp, |
1964 type_.value, | 2019 type_.value, |
1965 body_plain, | 2020 body_plain, |
1966 body_rich or '', | 2021 body_rich or "", |
1967 title or '', | 2022 title or "", |
1968 requires_action, | 2023 requires_action, |
1969 priority.value, | 2024 priority.value, |
1970 expire_at or 0, | 2025 expire_at or 0, |
1971 data_format.serialise(extra) if extra else '', | 2026 data_format.serialise(extra) if extra else "", |
1972 C.PROF_KEY_ALL if is_global else client.profile | 2027 C.PROF_KEY_ALL if is_global else client.profile, |
1973 ) | 2028 ) |
1974 | 2029 |
1975 def _get_notifications(self, filters_s: str, profile_key: str) -> defer.Deferred: | 2030 def _get_notifications(self, filters_s: str, profile_key: str) -> defer.Deferred: |
1976 """Fetch notifications for bridge with given filters and profile key. | 2031 """Fetch notifications for bridge with given filters and profile key. |
1977 | 2032 |
1990 client = self.host.get_client(profile_key) | 2045 client = self.host.get_client(profile_key) |
1991 | 2046 |
1992 filters = data_format.deserialise(filters_s) | 2047 filters = data_format.deserialise(filters_s) |
1993 | 2048 |
1994 try: | 2049 try: |
1995 if 'type' in filters: | 2050 if "type" in filters: |
1996 filters['type_'] = NotificationType[filters.pop('type')] | 2051 filters["type_"] = NotificationType[filters.pop("type")] |
1997 if 'status' in filters: | 2052 if "status" in filters: |
1998 filters['status'] = NotificationStatus[filters['status']] | 2053 filters["status"] = NotificationStatus[filters["status"]] |
1999 if 'min_priority' in filters: | 2054 if "min_priority" in filters: |
2000 filters['min_priority'] = NotificationPriority[filters['min_priority']].value | 2055 filters["min_priority"] = NotificationPriority[ |
2056 filters["min_priority"] | |
2057 ].value | |
2001 except KeyError as e: | 2058 except KeyError as e: |
2002 raise exceptions.DataError(f"invalid filter data: {e}") | 2059 raise exceptions.DataError(f"invalid filter data: {e}") |
2003 | 2060 |
2004 d = defer.ensureDeferred(self.storage.get_notifications(client, **filters)) | 2061 d = defer.ensureDeferred(self.storage.get_notifications(client, **filters)) |
2005 d.addCallback( | 2062 d.addCallback( |
2008 ) | 2065 ) |
2009 ) | 2066 ) |
2010 return d | 2067 return d |
2011 | 2068 |
2012 def _delete_notification( | 2069 def _delete_notification( |
2013 self, | 2070 self, id_: str, is_global: bool, profile_key: str |
2014 id_: str, | |
2015 is_global: bool, | |
2016 profile_key: str | |
2017 ) -> defer.Deferred: | 2071 ) -> defer.Deferred: |
2018 client = self.host.get_client(profile_key) | 2072 client = self.host.get_client(profile_key) |
2019 if is_global and not client.is_admin: | 2073 if is_global and not client.is_admin: |
2020 raise exceptions.PermissionError( | 2074 raise exceptions.PermissionError( |
2021 "Only admins can delete global notifications" | 2075 "Only admins can delete global notifications" |
2022 ) | 2076 ) |
2023 return defer.ensureDeferred(self.delete_notification(client, id_, is_global)) | 2077 return defer.ensureDeferred(self.delete_notification(client, id_, is_global)) |
2024 | 2078 |
2025 async def delete_notification( | 2079 async def delete_notification( |
2026 self, | 2080 self, client: SatXMPPEntity, id_: str, is_global: bool = False |
2027 client: SatXMPPEntity, | |
2028 id_: str, | |
2029 is_global: bool=False | |
2030 ) -> None: | 2081 ) -> None: |
2031 """Delete a notification | 2082 """Delete a notification |
2032 | 2083 |
2033 the notification must be from the requesting profile. | 2084 the notification must be from the requesting profile. |
2034 @param id_: ID of the notification | 2085 @param id_: ID of the notification |
2035 is_global: if True, a global notification will be removed. | 2086 is_global: if True, a global notification will be removed. |
2036 """ | 2087 """ |
2037 await self.storage.delete_notification(None if is_global else client, id_) | 2088 await self.storage.delete_notification(None if is_global else client, id_) |
2038 self.host.bridge.notification_deleted( | 2089 self.host.bridge.notification_deleted( |
2039 id_, | 2090 id_, C.PROF_KEY_ALL if is_global else client.profile |
2040 C.PROF_KEY_ALL if is_global else client.profile | |
2041 ) | 2091 ) |
2042 | 2092 |
2043 def _notifications_expired_clean( | 2093 def _notifications_expired_clean( |
2044 self, limit_timestamp: float, profile_key: str | 2094 self, limit_timestamp: float, profile_key: str |
2045 ) -> defer.Deferred: | 2095 ) -> defer.Deferred: |
2048 else: | 2098 else: |
2049 client = self.host.get_client(profile_key) | 2099 client = self.host.get_client(profile_key) |
2050 | 2100 |
2051 return defer.ensureDeferred( | 2101 return defer.ensureDeferred( |
2052 self.storage.clean_expired_notifications( | 2102 self.storage.clean_expired_notifications( |
2053 client, | 2103 client, None if limit_timestamp == -1.0 else limit_timestamp |
2054 None if limit_timestamp == -1.0 else limit_timestamp | 2104 ) |
2055 ) | 2105 ) |
2056 ) | |
2057 | |
2058 | 2106 |
2059 ## Misc ## | 2107 ## Misc ## |
2060 | 2108 |
2061 def is_entity_available(self, client, entity_jid): | 2109 def is_entity_available(self, client, entity_jid): |
2062 """Tell from the presence information if the given entity is available. | 2110 """Tell from the presence information if the given entity is available. |