view src/memory/cache.py @ 2157:b4a515e36631

jp (blog): added blog/get command: - blog/get will retrieve and parse blog items on the specified service/node, and can request one or more specific items by their ids, up to a maximum count - it takes advantage of outputs and the new extra_outputs: the default output displays microblogs as key/value pairs, a fancy one displays them in a more comfortable way for reading in the terminal - for the default output, keys can be filtered using the --key argument
author Goffi <goffi@goffi.org>
date Thu, 16 Feb 2017 01:02:33 +0100
parents 766dbbec56f2
children 1b3fbb76984b
line wrap: on
line source

#!/usr/bin/env python2
# -*- coding: utf-8 -*-

# SAT: a jabber client
# Copyright (C) 2009-2016 Jérôme Poisson (goffi@goffi.org)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from sat.core.log import getLogger
log = getLogger(__name__)
from sat.core import exceptions
from sat.core.constants import Const as C
import cPickle as pickle
import mimetypes
import os.path
import time


class Cache(object):
    """Generic file caching.

    Each cache entry is stored as two files inside cache_dir:
    - a pickled metadata file named after the uid (holding source, data
      filename, MIME type and an "eol" end-of-life timestamp)
    - the data file itself, whose name is recorded in the metadata
    """

    def __init__(self, host, profile=None):
        """
        @param host: SàT instance
        @param profile(unicode, None): profile to use for the cache
            None to use the general (non profile-specific) cache
        """
        # FIX: was a no-op "host = host" which silently dropped the reference
        self.host = host
        self.profile = profile
        self.cache_dir = os.path.join(
            host.memory.getConfig('', 'local_dir'),
            C.CACHE_DIR,
            profile or '')
        if not os.path.exists(self.cache_dir):
            os.makedirs(self.cache_dir)

    def getPath(self, filename):
        """return cached file URL

        @param filename(unicode): cached file name (cache data or actual file)
        @return (unicode): path of the file inside cache_dir
        @raise exceptions.DataError: filename is empty or would escape the
            cache directory (path traversal attempt)
        """
        # FIX: also reject bare "." / ".." which contain no u'/' but would
        # resolve at or above cache_dir
        if not filename or u'/' in filename or filename in (u'.', u'..'):
            log.error(u"invalid char found in file name, hack attempt? name:{}".format(filename))
            raise exceptions.DataError(u"Invalid char found")
        return os.path.join(self.cache_dir, filename)

    def getFilePath(self, uid):
        """retrieve absolute path to file

        @param uid(unicode): unique identifier of file
        @return (unicode, None): absolute path to cached file
            None if file is not in cache (or cache is invalid/expired)
        """
        uid = uid.strip()
        if not uid:
            return None
        cache_url = self.getPath(uid)
        if not os.path.exists(cache_url):
            return None

        try:
            with open(cache_url, 'rb') as f:
                cache_data = pickle.load(f)
        except IOError:
            log.warning(u"can't read cache at {}".format(cache_url))
            return None

        except pickle.UnpicklingError:
            log.warning(u'invalid cache found at {}'.format(cache_url))
            return None

        try:
            eol = cache_data['eol']
        except KeyError:
            log.warning(u'no End Of Life found for cached file {}'.format(uid))
            eol = 0
        if eol < time.time():
            log.debug(u"removing expired cache (expired for {}s)".format(
                time.time() - eol))
            # FIX: the log above claimed removal but nothing was deleted,
            # so expired entries accumulated forever. Best-effort unlink of
            # both the data file and the metadata file.
            filename = cache_data.get('filename')
            if filename is not None:
                try:
                    os.unlink(self.getPath(filename))
                except OSError:
                    pass
            try:
                os.unlink(cache_url)
            except OSError:
                pass
            return None

        try:
            filename = cache_data['filename']
        except KeyError:
            # FIX: a metadata file without 'filename' used to raise an
            # uncaught KeyError; treat it as an invalid cache instead,
            # matching the documented contract
            log.warning(u'no filename found for cached file {}'.format(uid))
            return None
        return self.getPath(filename)

    def cacheData(self, source, uid, mime_type=u'', max_age=None, filename=None):
        """create cache metadata and file object to use for actual data

        @param source(unicode): source of the cache (should be plugin's import_name)
        @param uid(unicode): an identifier of the file which must be unique
        @param mime_type(unicode): MIME type of the file to cache
            it will be used notably to guess file extension
        @param max_age(int, None): maximum age in seconds
            the cache metadata will have an "eol" (end of life)
            None to use default value
            0 to ignore cache (file will be re-downloaded on each access)
        @param filename: if not None, will be used as filename
            else one will be generated from uid and guessed extension
        @return(file): file object opened in write mode
            you have to close it yourself (hint: use with statement)
        """
        # FIXME: is it needed to use a separate thread?
        #        probably not with the little data expected with BoB
        cache_url = self.getPath(uid)
        ext = mimetypes.guess_extension(mime_type, strict=False)
        if ext is None:
            log.warning(u"can't find extension for MIME type {}".format(mime_type))
            ext = '.dump'
        if filename is None:
            filename = uid + ext
        if max_age is None:
            max_age = C.DEFAULT_MAX_AGE
        cache_data = {'source': source,
                      'filename': filename,
                      # eol is stored as an absolute timestamp, not a duration
                      'eol': int(time.time()) + max_age,
                      'mime_type': mime_type,
                      }
        file_path = self.getPath(filename)

        # metadata is written first; the data file is created by the open()
        # below, so both exist once this method returns
        with open(cache_url, 'wb') as f:
            pickle.dump(cache_data, f, protocol=2)

        return open(file_path, 'wb')