diff sat/plugins/plugin_misc_identity.py @ 3816:213e83a4ed10

plugin identity, XEP-0054: move avatar resizing and caching to identity plugin: resizing and caching are now done in the identity plugin, to prepare for the implementation of other XEPs handling avatars. rel 368
author Goffi <goffi@goffi.org>
date Wed, 29 Jun 2022 11:47:48 +0200
parents 888109774673
children 998c5318230f
--- a/sat/plugins/plugin_misc_identity.py	Wed Jun 29 11:36:31 2022 +0200
+++ b/sat/plugins/plugin_misc_identity.py	Wed Jun 29 11:47:48 2022 +0200
@@ -15,21 +15,34 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from typing import Dict, List, Union, Coroutine, Any, Optional
 from collections import namedtuple
+import io
 from pathlib import Path
-from twisted.internet import defer
+from base64 import b64encode
+import hashlib
+from typing import Any, Coroutine, Dict, List, Optional, Union
+
+from twisted.internet import defer, threads
 from twisted.words.protocols.jabber import jid
-from sat.core.xmpp import SatXMPPEntity
-from sat.core.i18n import _
+
+from sat.core import exceptions
 from sat.core.constants import Const as C
-from sat.core import exceptions
+from sat.core.i18n import _
 from sat.core.log import getLogger
+from sat.core.xmpp import SatXMPPEntity
 from sat.memory import persistent
 from sat.tools import image
 from sat.tools import utils
 from sat.tools.common import data_format
 
+try:
+    from PIL import Image
+except ImportError:
+    raise exceptions.MissingModule(
+        "Missing module pillow, please download/install it from https://python-pillow.github.io"
+    )
+
+
 
 log = getLogger(__name__)
 
@@ -50,6 +63,7 @@
 }
 
 Callback = namedtuple("Callback", ("origin", "get", "set", "priority"))
+AVATAR_DIM = (128, 128)
 
 
 class Identity:
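
Note: the new AVATAR_DIM constant is the target size used by the resizing code added in the next hunk: bitmap avatars are thumbnailed to fit within 128x128 and, when the ratio is not 1:1, centre-cropped to a square. A minimal standalone sketch of that crop arithmetic (hypothetical helper, assuming Pillow is available; not part of the patch):

    from PIL import Image

    AVATAR_DIM = (128, 128)

    def crop_to_square(img: Image.Image) -> Image.Image:
        # centre-crop a non-square image, trimming the longer side equally on both ends
        width, height = img.size
        if width == height:
            return img
        offset = abs(width - height) // 2
        if width < height:
            # portrait: trim top and bottom
            return img.crop((0, offset, width, height - offset))
        # landscape: trim left and right
        return img.crop((offset, 0, width - offset, height))

    # usage mirroring the patch: img.thumbnail(AVATAR_DIM), then crop_to_square(img) if not square
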
@@ -474,6 +488,68 @@
         return defer.ensureDeferred(
             self.set(client, "avatar", file_path, entity))
 
+    def _blockingCacheAvatar(
+        self,
+        source: str,
+        avatar_data: Dict[str, Any]
+    ) -> None:
+        """This method is executed in a separated thread"""
+        if avatar_data["media_type"] == "image/svg+xml":
+            # for vector images, we keep the file content as-is
+            img_buf = io.BytesIO(Path(avatar_data["path"]).read_bytes())
+        else:
+            # for bitmap image, we check size and resize if necessary
+            try:
+                img = Image.open(avatar_data["path"])
+            except IOError as e:
+                raise exceptions.DataError(f"Can't open image: {e}")
+
+            if img.size != AVATAR_DIM:
+                img.thumbnail(AVATAR_DIM)
+                if img.size[0] != img.size[1]:  # thumbnail keeps the ratio, crop to a square
+                    left, upper = (0, 0)
+                    right, lower = img.size
+                    offset = abs(right - lower) // 2
+                    if right == min(img.size):
+                        upper += offset
+                        lower -= offset
+                    else:
+                        left += offset
+                        right -= offset
+                    img = img.crop((left, upper, right, lower))
+            img_buf = io.BytesIO()
+            # PNG is well supported among clients, so we convert to this format
+            img.save(img_buf, "PNG")
+            img_buf.seek(0)
+            avatar_data["media_type"] = "image/png"
+
+        media_type = avatar_data["media_type"]
+        avatar_data["base64"] = image_b64 = b64encode(img_buf.read()).decode()
+        img_buf.seek(0)
+        image_hash = hashlib.sha1(img_buf.read()).hexdigest()
+        img_buf.seek(0)
+        with self.host.common_cache.cacheData(
+            source, image_hash, media_type
+        ) as f:
+            f.write(img_buf.read())
+            avatar_data['path'] = Path(f.name)
+            avatar_data['filename'] = avatar_data['path'].name
+        avatar_data['cache_uid'] = image_hash
+
+    async def cacheAvatar(self, source: str, avatar_data: Dict[str, Any]) -> None:
+        """Resize if necessary and cache avatar
+
+        @param source: source importing the avatar (usually the plugin's import name),
+            will be used in cache metadata
+        @param avatar_data: avatar metadata as built by [avatarSetDataFilter];
+            it will be updated with the following keys:
+                path: updated path using cached file
+                filename: updated filename using cached file
+                base64: resized and base64 encoded avatar
+                cache_uid: SHA1 hash used as cache unique ID
+        """
+        await threads.deferToThread(self._blockingCacheAvatar, source, avatar_data)
+
     async def avatarSetDataFilter(self, client, entity, file_path):
         """Convert avatar file path to dict data"""
         file_path = Path(file_path)
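
For reference, a sketch of how another avatar handling plugin (e.g. a future XEP-0084 implementation) might reuse the new cacheAvatar coroutine; the self._i reference, the source name and the file path are hypothetical and not part of this patch:

    # hypothetical caller holding the identity plugin as self._i
    avatar_data = {
        "path": Path("/tmp/new_avatar.png"),  # hypothetical source file
        "media_type": "image/png",
    }
    # resizes/converts in a thread, then fills path, filename, base64 and cache_uid
    await self._i.cacheAvatar("XEP-0084", avatar_data)
    cached_path = avatar_data["path"]      # now points to the file in common cache
    cache_uid = avatar_data["cache_uid"]   # SHA1 hash used as cache unique ID
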
@@ -489,13 +565,19 @@
             raise ValueError(f"Can't identify type of image at {file_path}")
         if not media_type.startswith('image/'):
             raise ValueError(f"File at {file_path} doesn't appear to be an image")
+        await self.cacheAvatar(IMPORT_NAME, avatar_data)
         return avatar_data
 
     async def avatarSetPostTreatment(self, client, entity, avatar_data):
         """Update our own avatar"""
         await self.update(client, IMPORT_NAME, "avatar", avatar_data, entity)
 
-    def avatarBuildMetadata(self, path, media_type=None, cache_uid=None):
+    def avatarBuildMetadata(
+            self,
+            path: Path,
+            media_type: Optional[str] = None,
+            cache_uid: Optional[str] = None
+    ) -> Optional[Dict[str, Union[str, Path, None]]]:
         """Helper method to generate avatar metadata
 
         @param path(str, Path, None): path to avatar file