changeset 2109:85f3e12e984d

core (memory/cache): file caching handling, first draft: instead of having file caching handled individually by plugins, a generic module has been added in memory. - Cache can be global or associated to a profile. In the latter case, client.cache can be used. - Caches are managed with unique ids (which can be any unique unicode, hash, uuid, or something else). - To know if a file is in cache, getFilePath is used: if the file is in cache, its absolute path is returned, else None is returned. - To cache a file, cacheData is used with at least the source of the cache (most of the time the plugin import name) and a unique id. The method returns a file opened in binary write mode (so cacheData can - and should - be used with a "with" statement). - 2 files will be created: a metadata file (named after the unique id), and the actual file. - each file has an end-of-life time; after it, the cache is invalidated and the file must be requested again.
author Goffi <goffi@goffi.org>
date Thu, 05 Jan 2017 20:23:38 +0100
parents 70f23bc7859b
children 2d633b3c923d
files src/core/constants.py src/core/xmpp.py src/memory/cache.py
diffstat 3 files changed, 130 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- a/src/core/constants.py	Tue Jan 03 18:51:50 2017 +0100
+++ b/src/core/constants.py	Thu Jan 05 20:23:38 2017 +0100
@@ -132,6 +132,9 @@
     NS_DELAY = 'urn:xmpp:delay'
     NS_XHTML = 'http://www.w3.org/1999/xhtml'
 
+    ## Directories ##
+    CACHE_DIR = u'cache'
+
 
     ## Configuration ##
     if BaseDirectory:  # skipped when xdg module is not available (should not happen in backend)
@@ -281,6 +284,7 @@
     ENV_PREFIX = 'SAT_' # Prefix used for environment variables
     IGNORE = 'ignore'
     NO_LIMIT = -1 # used in bridge when a integer value is expected
+    DEFAULT_MAX_AGE = 1209600 # default max age of cached files, in seconds
 
 
     ## ANSI escape sequences ##
--- a/src/core/xmpp.py	Tue Jan 03 18:51:50 2017 +0100
+++ b/src/core/xmpp.py	Thu Jan 05 20:23:38 2017 +0100
@@ -19,6 +19,7 @@
 
 from sat.core.i18n import _
 from sat.core.constants import Const as C
+from sat.memory import cache
 from twisted.internet import task, defer
 from twisted.words.protocols.jabber.xmlstream import XMPPHandler
 from twisted.words.protocols.jabber import xmlstream
@@ -60,6 +61,7 @@
         self.__connected = False
         self.profile = profile
         self.host_app = host_app
+        self.cache = cache.Cache(host_app, profile)
         self._mess_id_uid = {} # map from message id to uid use in history. Key: (full_jid,message_id) Value: uid
         self.conn_deferred = defer.Deferred()
         self._progress_cb = {}  # callback called when a progress is requested (key = progress id)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/memory/cache.py	Thu Jan 05 20:23:38 2017 +0100
@@ -0,0 +1,144 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+
+# SAT: a jabber client
+# Copyright (C) 2009-2016 Jérôme Poisson (goffi@goffi.org)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from sat.core.log import getLogger
+log = getLogger(__name__)
+from sat.core import exceptions
+from sat.core.constants import Const as C
+import cPickle as pickle
+import mimetypes
+import os.path
+import time
+
+
+class Cache(object):
+    """generic file caching"""
+
+    def __init__(self, host, profile=None):
+        """
+        @param host: SàT instance
+        @param profile(unicode, None): name of the profile to use for a
+            profile-specific cache, None for the general cache
+        """
+        self.host = host  # was a no-op "host = host" which discarded the reference
+        self.profile = profile
+        self.cache_dir = os.path.join(
+            host.memory.getConfig('', 'local_dir'),
+            C.CACHE_DIR,
+            profile or '')
+        if not os.path.exists(self.cache_dir):
+            os.makedirs(self.cache_dir)
+
+    def getPath(self, filename):
+        """return cached file URL
+
+        @param filename(unicode): cached file name (cache data or actual file)
+        @return (unicode): absolute path to the file in the cache directory
+        @raise exceptions.DataError: filename is empty or contains a path
+            separator (protection against directory traversal)
+        """
+        if not filename or u'/' in filename:
+            log.error(u"invalid char found in file name, hack attempt? name:{}".format(filename))
+            raise exceptions.DataError(u"Invalid char found")
+        return os.path.join(self.cache_dir, filename)
+
+    def getFilePath(self, uid):
+        """retrieve absolute path to file
+
+        @param uid(unicode): unique identifier of file
+        @return (unicode, None): absolute path to cached file
+            None if file is not in cache (or cache is invalid/expired)
+        """
+        cache_url = self.getPath(uid)
+        if not os.path.exists(cache_url):
+            return None
+
+        try:
+            with open(cache_url, 'rb') as f:
+                cache_data = pickle.load(f)
+        except IOError:
+            log.warning(u"can't read cache at {}".format(cache_url))
+            return None
+        except pickle.UnpicklingError:
+            log.warning(u'invalid cache found at {}'.format(cache_url))
+            return None
+
+        try:
+            eol = cache_data['eol']
+        except KeyError:
+            # no end of life, assume the cache is already expired
+            log.warning(u'no End Of Life found for cached file {}'.format(uid))
+            eol = 0
+        now = time.time()
+        if eol < now:
+            log.debug(u"removing expired cache (expired for {}s)".format(now - eol))
+            # actually remove the stale files, as announced by the log message
+            filename = cache_data.get('filename')
+            if filename is not None:
+                try:
+                    os.unlink(self.getPath(filename))
+                except OSError:
+                    pass
+            try:
+                os.unlink(cache_url)
+            except OSError:
+                pass
+            return None
+
+        return self.getPath(cache_data['filename'])
+
+    def cacheData(self, source, uid, mime_type=u'', max_age=None, filename=None):
+        """create cache metadata and file object to use for actual data
+
+        @param source(unicode): source of the cache (should be plugin's import_name)
+        @param uid(unicode): an identifier of the file which must be unique
+        @param mime_type(unicode): MIME type of the file to cache
+            it will be used notably to guess file extension
+        @param max_age(int, None): maximum age in seconds
+            the cache metadata will have an "eol" (end of life)
+            None to use default value
+            0 to ignore cache (file will be re-downloaded on each access)
+        @param filename: if not None, will be used as filename
+            else one will be generated from uid and guessed extension
+        @return(file): file object opened in binary write mode
+            you have to close it yourself (hint: use with statement)
+        """
+        # FIXME: is it needed to use a separate thread?
+        #        probably not with the little data expected with BoB
+        cache_url = self.getPath(uid)
+        if filename is None:
+            # only guess the extension when we actually need to build a name,
+            # to avoid a spurious warning when an explicit filename is given
+            ext = mimetypes.guess_extension(mime_type, strict=False)
+            if ext is None:
+                log.warning(u"can't find extension for MIME type {}".format(mime_type))
+                ext = '.dump'
+            filename = uid + ext
+        if max_age is None:
+            max_age = C.DEFAULT_MAX_AGE
+        cache_data = {'source': source,
+                      'filename': filename,
+                      'eol': int(time.time()) + max_age,
+                      'mime_type': mime_type,
+                      }
+        file_path = self.getPath(filename)
+
+        with open(cache_url, 'wb') as f:
+            pickle.dump(cache_data, f, protocol=2)
+
+        return open(file_path, 'wb')