summaryrefslogtreecommitdiff
path: root/lib/python2.7/site-packages/django/core/cache
diff options
context:
space:
mode:
Diffstat (limited to 'lib/python2.7/site-packages/django/core/cache')
-rw-r--r--lib/python2.7/site-packages/django/core/cache/__init__.py138
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/base.py235
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/db.py205
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/dummy.py46
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/filebased.py160
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/locmem.py140
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/memcached.py190
-rw-r--r--lib/python2.7/site-packages/django/core/cache/utils.py15
9 files changed, 1129 insertions, 0 deletions
diff --git a/lib/python2.7/site-packages/django/core/cache/__init__.py b/lib/python2.7/site-packages/django/core/cache/__init__.py
new file mode 100644
index 0000000..ea3a68f
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/__init__.py
@@ -0,0 +1,138 @@
+"""
+Caching framework.
+
+This package defines set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/topics/cache.txt for information on the public API.
+"""
+
+from django.conf import settings
+from django.core import signals
+from django.core.cache.backends.base import (
+ InvalidCacheBackendError, CacheKeyWarning, BaseCache)
+from django.core.exceptions import ImproperlyConfigured
+from django.utils import importlib
+from django.utils.module_loading import import_by_path
+from django.utils.six.moves.urllib.parse import parse_qsl
+
+
+__all__ = [
+ 'get_cache', 'cache', 'DEFAULT_CACHE_ALIAS'
+]
+
# Name for use in settings file --> name of module in "backends" directory.
# Any backend scheme that is not in this dictionary is treated as a Python
# import path to a custom backend.
BACKENDS = {
    'memcached': 'memcached',
    'locmem': 'locmem',
    'file': 'filebased',
    'db': 'db',
    'dummy': 'dummy',
}

# Alias of the cache configuration that is required to exist in
# settings.CACHES; the module-level `cache` object is built from it.
DEFAULT_CACHE_ALIAS = 'default'
+
def parse_backend_uri(backend_uri):
    """
    Split a legacy cache URI into its components.

    Given e.g. "memcached://127.0.0.1:11211/?timeout=10", returns a
    (scheme, host, params) tuple where ``params`` is a dict built from
    the query string and a single trailing slash is stripped from the
    host. Raises InvalidCacheBackendError when the URI does not have
    the "scheme://" form.
    """
    scheme, sep, rest = backend_uri.partition(':')
    if not sep or not rest.startswith('//'):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")

    query_start = rest.find('?')
    if query_start == -1:
        host, params = rest[2:], {}
    else:
        host = rest[2:query_start]
        params = dict(parse_qsl(rest[query_start + 1:]))

    if host.endswith('/'):
        host = host[:-1]

    return scheme, host, params
+
# Fail fast at import time if the project settings lack a 'default' cache
# entry; get_cache(DEFAULT_CACHE_ALIAS) below assumes it exists.
if DEFAULT_CACHE_ALIAS not in settings.CACHES:
    raise ImproperlyConfigured("You must define a '%s' cache" % DEFAULT_CACHE_ALIAS)
+
def parse_backend_conf(backend, **kwargs):
    """
    Resolve a backend specification that does not use URI notation.

    ``backend`` is either an alias into settings.CACHES or a dotted
    import path to a backend class. Returns a (backend, location, params)
    tuple; raises InvalidCacheBackendError when a dotted path cannot be
    imported.
    """
    conf = settings.CACHES.get(backend, None)
    if conf is None:
        # Not a configured alias -- treat it as a dotted import path and
        # verify it is importable before handing it back.
        try:
            import_by_path(backend)
        except ImproperlyConfigured as e:
            raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
                backend, e))
        return backend, kwargs.pop('LOCATION', ''), kwargs

    # Configured alias: keyword arguments override the settings entry.
    args = conf.copy()
    args.update(kwargs)
    resolved = args.pop('BACKEND')
    location = args.pop('LOCATION', '')
    return resolved, location, args
+
def get_cache(backend, **kwargs):
    """
    Function to load a cache backend dynamically. This is flexible by design
    to allow different use cases:

    To load a backend with the old URI-based notation::

        cache = get_cache('locmem://')

    To load a backend that is pre-defined in the settings::

        cache = get_cache('default')

    To load a backend with its dotted import path,
    including arbitrary options::

        cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', **{
            'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 30,
        })

    Raises InvalidCacheBackendError when the backend cannot be resolved.
    """
    try:
        if '://' in backend:
            # for backwards compatibility: legacy URI notation. Known
            # schemes map through BACKENDS to a bundled module whose
            # CacheClass alias is instantiated; unknown schemes are
            # imported as-is.
            backend, location, params = parse_backend_uri(backend)
            if backend in BACKENDS:
                backend = 'django.core.cache.backends.%s' % BACKENDS[backend]
            params.update(kwargs)
            mod = importlib.import_module(backend)
            backend_cls = mod.CacheClass
        else:
            # Settings alias or dotted path to a backend class.
            backend, location, params = parse_backend_conf(backend, **kwargs)
            backend_cls = import_by_path(backend)
    except (AttributeError, ImportError, ImproperlyConfigured) as e:
        raise InvalidCacheBackendError(
            "Could not find backend '%s': %s" % (backend, e))
    cache = backend_cls(location, params)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend
    # cache.close is a no-op
    signals.request_finished.connect(cache.close)
    return cache
+
# Module-level default cache instance, created at import time from the
# 'default' entry in settings.CACHES.
cache = get_cache(DEFAULT_CACHE_ALIAS)
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/__init__.py b/lib/python2.7/site-packages/django/core/cache/backends/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/base.py b/lib/python2.7/site-packages/django/core/cache/backends/base.py
new file mode 100644
index 0000000..deb98e7
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/base.py
@@ -0,0 +1,235 @@
+"Base Cache class."
+from __future__ import unicode_literals
+
+import warnings
+
+from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning
+from django.utils.module_loading import import_by_path
+
+
class InvalidCacheBackendError(ImproperlyConfigured):
    """Raised when a cache backend cannot be located or imported."""
    pass
+
+
class CacheKeyWarning(DjangoRuntimeWarning):
    """Warning issued for cache keys that would break on memcached
    (too long, or containing control characters); see
    BaseCache.validate_key()."""
    pass
+
+
# Sentinel object: lets backends distinguish "no timeout argument given"
# (use the configured default) from an explicit timeout=None (never expire).
DEFAULT_TIMEOUT = object()

# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250
+
+
def default_key_func(key, key_prefix, version):
    """
    Default key-construction function.

    Joins the configured ``key_prefix``, the ``version`` and the caller's
    ``key`` into one colon-separated cache key. An alternate function can
    be configured through the KEY_FUNCTION setting.
    """
    return '{0}:{1}:{2}'.format(key_prefix, version, key)
+
+
def get_key_func(key_func):
    """
    Decide which key-construction function the cache should use.

    ``key_func`` may be None (fall back to ``default_key_func``),
    a callable (used as-is), or a dotted-path string that is imported.
    """
    if key_func is None:
        return default_key_func
    return key_func if callable(key_func) else import_by_path(key_func)
+
+
class BaseCache(object):
    """
    Abstract base class defining the cache API.

    Concrete backends must implement add(), get(), set(), delete() and
    clear(); the remaining operations have default implementations built
    on top of those primitives and may be overridden where the backend
    can do better (e.g. native multi-get or atomic increment).
    """
    def __init__(self, params):
        # ``params`` is a CACHES settings entry. Malformed numeric options
        # silently fall back to their defaults rather than raising.
        timeout = params.get('timeout', params.get('TIMEOUT', 300))
        try:
            timeout = int(timeout)
        except (ValueError, TypeError):
            timeout = 300
        self.default_timeout = timeout

        options = params.get('OPTIONS', {})
        max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self.key_prefix = params.get('KEY_PREFIX', '')
        self.version = params.get('VERSION', 1)
        self.key_func = get_key_func(params.get('KEY_FUNCTION', None))

    def make_key(self, key, version=None):
        """Constructs the key used by all other methods. By default it
        uses the key_func to generate a key (which, by default,
        prepends the `key_prefix' and 'version'). A different key
        function can be provided at the time of cache construction;
        alternatively, you can subclass the cache backend to provide
        custom key making behavior.
        """
        if version is None:
            version = self.version

        new_key = self.key_func(key, self.key_prefix, version)
        return new_key

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache if the key does not already exist. If
        timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.

        Returns True if the value was stored, False otherwise.
        """
        raise NotImplementedError

    def get(self, key, default=None, version=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        raise NotImplementedError

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache. If timeout is given, that timeout will be
        used for the key; otherwise the default cache timeout will be used.
        """
        raise NotImplementedError

    def delete(self, key, version=None):
        """
        Delete a key from the cache, failing silently.
        """
        raise NotImplementedError

    def get_many(self, keys, version=None):
        """
        Fetch a bunch of keys from the cache. For certain backends (memcached,
        pgsql) this can be *much* faster when fetching multiple values.

        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        """
        # NOTE: a stored value of None is indistinguishable from a miss
        # here and will be omitted from the result.
        d = {}
        for k in keys:
            val = self.get(k, version=version)
            if val is not None:
                d[k] = val
        return d

    def has_key(self, key, version=None):
        """
        Returns True if the key is in the cache and has not expired.
        """
        return self.get(key, version=version) is not None

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache. If the key does not exist, raise a
        ValueError exception.
        """
        # Read-modify-write: not atomic. Backends with a native atomic
        # increment are expected to override this.
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        self.set(key, new_value, version=version)
        return new_value

    def decr(self, key, delta=1, version=None):
        """
        Subtract delta from value in the cache. If the key does not exist, raise
        a ValueError exception.
        """
        return self.incr(key, -delta, version=version)

    def __contains__(self, key):
        """
        Returns True if the key is in the cache and has not expired.
        """
        # This is a separate method, rather than just a copy of has_key(),
        # so that it always has the same functionality as has_key(), even
        # if a subclass overrides it.
        return self.has_key(key)

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs. For certain backends (memcached), this is much more efficient
        than calling set() multiple times.

        If timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.
        """
        for key, value in data.items():
            self.set(key, value, timeout=timeout, version=version)

    def delete_many(self, keys, version=None):
        """
        Set a bunch of values in the cache at once. For certain backends
        (memcached), this is much more efficient than calling delete() multiple
        times.
        """
        for key in keys:
            self.delete(key, version=version)

    def clear(self):
        """Remove *all* values from the cache at once."""
        raise NotImplementedError

    def validate_key(self, key):
        """
        Warn about keys that would not be portable to the memcached
        backend. This encourages (but does not force) writing backend-portable
        cache code.

        """
        if len(key) > MEMCACHE_MAX_KEY_LENGTH:
            warnings.warn('Cache key will cause errors if used with memcached: '
                    '%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH),
                    CacheKeyWarning)
        for char in key:
            # Reject control characters, space (< 33) and DEL (127), which
            # memcached's text protocol cannot carry in a key.
            if ord(char) < 33 or ord(char) == 127:
                warnings.warn('Cache key contains characters that will cause '
                        'errors if used with memcached: %r' % key,
                        CacheKeyWarning)

    def incr_version(self, key, delta=1, version=None):
        """Adds delta to the cache version for the supplied key. Returns the
        new version.
        """
        # Copies the value to the new version's key, then removes the old
        # one; raises ValueError if the key is missing at the old version.
        if version is None:
            version = self.version

        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)

        self.set(key, value, version=version+delta)
        self.delete(key, version=version)
        return version+delta

    def decr_version(self, key, delta=1, version=None):
        """Subtracts delta from the cache version for the supplied key. Returns
        the new version.
        """
        return self.incr_version(key, -delta, version)

    def close(self, **kwargs):
        """Close the cache connection"""
        pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/db.py b/lib/python2.7/site-packages/django/core/cache/backends/db.py
new file mode 100644
index 0000000..5c9d37b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/db.py
@@ -0,0 +1,205 @@
+"Database cache backend."
+import base64
+import time
+from datetime import datetime
+
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.conf import settings
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.db import connections, transaction, router, DatabaseError
+from django.db.backends.util import typecast_timestamp
+from django.utils import timezone, six
+from django.utils.encoding import force_bytes
+
+
class Options(object):
    """A class that will quack like a Django model _meta class.

    This allows cache operations to be controlled by the router
    """
    def __init__(self, table):
        self.db_table = table                        # cache table name
        self.app_label = 'django_cache'              # pseudo app label for routers
        self.model_name = 'cacheentry'
        self.verbose_name = 'cache entry'
        self.verbose_name_plural = 'cache entries'
        self.object_name = 'CacheEntry'
        self.abstract = False
        self.managed = True
        self.proxy = False
+
class BaseDatabaseCache(BaseCache):
    """Common setup for database caches: remembers the table name and
    builds a stub model class so database routers can pick the right
    connection for reads and writes."""
    def __init__(self, table, params):
        BaseCache.__init__(self, params)
        self._table = table

        class CacheEntry(object):
            # Quacks like a model for router.db_for_read/db_for_write.
            _meta = Options(table)
        self.cache_model_class = CacheEntry
+
class DatabaseCache(BaseDatabaseCache):
    """SQL-backed cache: one row per key with a pickled, base64-encoded
    value and an ``expires`` timestamp."""

    # This class uses cursors provided by the database connection. This means
    # it reads expiration values as aware or naive datetimes depending on the
    # value of USE_TZ. They must be compared to aware or naive representations
    # of "now" respectively.

    # But it bypasses the ORM for write operations. As a consequence, aware
    # datetimes aren't made naive for databases that don't support time zones.
    # We work around this problem by always using naive datetimes when writing
    # expiration values, in UTC when USE_TZ = True and in local time otherwise.

    def get(self, key, default=None, version=None):
        """Fetch ``key``, deleting and missing on an expired row."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT cache_key, value, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [key])
        row = cursor.fetchone()
        if row is None:
            return default
        now = timezone.now()
        expires = row[2]
        if connections[db].features.needs_datetime_string_cast and not isinstance(expires, datetime):
            # Note: typecasting is needed by some 3rd party database backends.
            # All core backends work without typecasting, so be careful about
            # changes here - test suite will NOT pick regressions here.
            expires = typecast_timestamp(str(expires))
        if expires < now:
            # Expired: remove lazily on read (via the write connection).
            db = router.db_for_write(self.cache_model_class)
            cursor = connections[db].cursor()
            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [key])
            return default
        value = connections[db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value`` under ``key``."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._base_set('set', key, value, timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store only if ``key`` is absent or expired; returns True if stored."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._base_set('add', key, value, timeout)

    def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        """Shared implementation of set() and add().

        ``mode`` is 'set' (always overwrite) or 'add' (only overwrite an
        expired row). Culls when the table grows past _max_entries.
        Returns True on success, False when the write failed silently.
        """
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT COUNT(*) FROM %s" % table)
        num = cursor.fetchone()[0]
        now = timezone.now()
        now = now.replace(microsecond=0)
        if timeout is None:
            # timeout=None means "never expire": store the max datetime.
            exp = datetime.max
        elif settings.USE_TZ:
            exp = datetime.utcfromtimestamp(time.time() + timeout)
        else:
            exp = datetime.fromtimestamp(time.time() + timeout)
        exp = exp.replace(microsecond=0)
        if num > self._max_entries:
            self._cull(db, cursor, now)
        pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
        b64encoded = base64.b64encode(pickled)
        # The DB column is expecting a string, so make sure the value is a
        # string, not bytes. Refs #19274.
        if six.PY3:
            b64encoded = b64encoded.decode('latin1')
        try:
            # Note: typecasting for datetimes is needed by some 3rd party
            # database backends. All core backends work without typecasting,
            # so be careful about changes here - test suite will NOT pick
            # regressions.
            with transaction.atomic(using=db):
                cursor.execute("SELECT cache_key, expires FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
                result = cursor.fetchone()
                if result:
                    current_expires = result[1]
                    if (connections[db].features.needs_datetime_string_cast and not
                            isinstance(current_expires, datetime)):
                        current_expires = typecast_timestamp(str(current_expires))
                exp = connections[db].ops.value_to_db_datetime(exp)
                if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                    cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                   "WHERE cache_key = %%s" % table,
                                   [b64encoded, exp, key])
                else:
                    cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                   "VALUES (%%s, %%s, %%s)" % table,
                                   [key, b64encoded, exp])
        except DatabaseError:
            # To be threadsafe, updates/inserts are allowed to fail silently
            return False
        else:
            return True

    def delete(self, key, version=None):
        """Delete ``key``, failing silently if it is absent."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])

    def has_key(self, key, version=None):
        """Return True if a non-expired row exists for ``key``."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        if settings.USE_TZ:
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = now.replace(microsecond=0)
        cursor.execute("SELECT cache_key FROM %s "
                       "WHERE cache_key = %%s and expires > %%s" % table,
                       [key, connections[db].ops.value_to_db_datetime(now)])
        return cursor.fetchone() is not None

    def _cull(self, db, cursor, now):
        """Delete expired rows; if still over _max_entries, drop roughly
        1/_cull_frequency of the remaining keys (0 means clear everything)."""
        if self._cull_frequency == 0:
            self.clear()
        else:
            # When USE_TZ is True, 'now' will be an aware datetime in UTC.
            now = now.replace(tzinfo=None)
            table = connections[db].ops.quote_name(self._table)
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connections[db].ops.value_to_db_datetime(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cull_num = num // self._cull_frequency
                cursor.execute(
                    connections[db].ops.cache_key_culling_sql() % table,
                    [cull_num])
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key < %%s" % table,
                               [cursor.fetchone()[0]])

    def clear(self):
        """Remove every row from the cache table."""
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()
        cursor.execute('DELETE FROM %s' % table)
+
+# For backwards compatibility
class CacheClass(DatabaseCache):
    """Deprecated alias kept for backwards compatibility (legacy URI
    notation resolves backends through the module's CacheClass name)."""
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/dummy.py b/lib/python2.7/site-packages/django/core/cache/backends/dummy.py
new file mode 100644
index 0000000..7ca6114
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/dummy.py
@@ -0,0 +1,46 @@
+"Dummy cache backend"
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+
class DummyCache(BaseCache):
    """
    A cache that never stores anything.

    Keys are still built and validated (so portability warnings fire),
    but every read misses and every write is a no-op.
    """
    def __init__(self, host, *args, **kwargs):
        # ``host`` is accepted and ignored, keeping the constructor
        # signature compatible with the real backends.
        BaseCache.__init__(self, *args, **kwargs)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Pretend to store; always reports success."""
        self.validate_key(self.make_key(key, version=version))
        return True

    def get(self, key, default=None, version=None):
        """Always a miss: returns ``default``."""
        self.validate_key(self.make_key(key, version=version))
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Discard the value."""
        self.validate_key(self.make_key(key, version=version))

    def delete(self, key, version=None):
        """Nothing to delete; key is still validated."""
        self.validate_key(self.make_key(key, version=version))

    def get_many(self, keys, version=None):
        """Every key misses."""
        return {}

    def has_key(self, key, version=None):
        """Nothing is ever cached."""
        self.validate_key(self.make_key(key, version=version))
        return False

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """Discard all values."""
        pass

    def delete_many(self, keys, version=None):
        """Nothing to delete."""
        pass

    def clear(self):
        """Nothing to clear."""
        pass
+
+# For backwards compatibility
class CacheClass(DummyCache):
    """Deprecated alias kept for backwards compatibility (legacy URI
    notation resolves backends through the module's CacheClass name)."""
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/filebased.py b/lib/python2.7/site-packages/django/core/cache/backends/filebased.py
new file mode 100644
index 0000000..d19eed4
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/filebased.py
@@ -0,0 +1,160 @@
+"File-based cache backend"
+
+import hashlib
+import os
+import shutil
+import time
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.utils.encoding import force_bytes
+
+
class FileBasedCache(BaseCache):
    """
    Cache backend storing each entry as a file on disk.

    Each file contains two consecutive pickles: the expiry time (a float
    timestamp, or None meaning "never expires" -- see set()) followed by
    the cached value. Files are spread over two levels of md5-derived
    subdirectories (see _key_to_file()).
    """
    def __init__(self, dir, params):
        BaseCache.__init__(self, params)
        self._dir = dir
        if not os.path.exists(self._dir):
            self._createdir()

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store only when the key is absent; returns True if stored.

        Note: the check-then-set is not atomic across processes.
        """
        if self.has_key(key, version=version):
            return False

        self.set(key, value, timeout, version=version)
        return True

    def get(self, key, default=None, version=None):
        """Return the cached value, or ``default`` on miss or expiry."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        fname = self._key_to_file(key)
        try:
            with open(fname, 'rb') as f:
                exp = pickle.load(f)
                now = time.time()
                if exp is not None and exp < now:
                    self._delete(fname)
                else:
                    return pickle.load(f)
        except (IOError, OSError, EOFError, pickle.PickleError):
            pass
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Write expiry and value to the key's file, failing silently
        on I/O errors."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        fname = self._key_to_file(key)
        dirname = os.path.dirname(fname)

        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout

        self._cull()

        try:
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            with open(fname, 'wb') as f:
                # None expiry == never expires; must be mirrored by the
                # expiry checks in get() and has_key().
                expiry = None if timeout is None else time.time() + timeout
                pickle.dump(expiry, f, pickle.HIGHEST_PROTOCOL)
                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
        except (IOError, OSError):
            pass

    def delete(self, key, version=None):
        """Delete the key's file, failing silently."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        try:
            self._delete(self._key_to_file(key))
        except (IOError, OSError):
            pass

    def _delete(self, fname):
        os.remove(fname)
        try:
            # Remove the 2 subdirs if they're empty
            dirname = os.path.dirname(fname)
            os.rmdir(dirname)
            os.rmdir(os.path.dirname(dirname))
        except (IOError, OSError):
            pass

    def has_key(self, key, version=None):
        """Return True if the key exists and has not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        fname = self._key_to_file(key)
        try:
            with open(fname, 'rb') as f:
                exp = pickle.load(f)
            now = time.time()
            # Bug fix: a None expiry means "never expires" (see set()).
            # The previous code compared None < now directly, wrongly
            # expiring -- and deleting -- non-expiring entries. The file
            # is also closed before _delete() removes it.
            if exp is not None and exp < now:
                self._delete(fname)
                return False
            else:
                return True
        except (IOError, OSError, EOFError, pickle.PickleError):
            return False

    def _cull(self):
        """When over _max_entries, delete 1/_cull_frequency of the entry
        directories (all of them when _cull_frequency is 0)."""
        if int(self._num_entries) < self._max_entries:
            return

        try:
            filelist = sorted(os.listdir(self._dir))
        except (IOError, OSError):
            return

        if self._cull_frequency == 0:
            doomed = filelist
        else:
            doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]

        for topdir in doomed:
            try:
                for root, _, files in os.walk(topdir):
                    for f in files:
                        self._delete(os.path.join(root, f))
            except (IOError, OSError):
                pass

    def _createdir(self):
        try:
            os.makedirs(self._dir)
        except OSError:
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created'" % self._dir)

    def _key_to_file(self, key):
        """
        Convert the filename into an md5 string. We'll turn the first couple
        bits of the path into directory prefixes to be nice to filesystems
        that have problems with large numbers of files in a directory.

        Thus, a cache key of "foo" gets turned into a file named
        ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
        """
        path = hashlib.md5(force_bytes(key)).hexdigest()
        path = os.path.join(path[:2], path[2:4], path[4:])
        return os.path.join(self._dir, path)

    def _get_num_entries(self):
        # Walk the whole tree; O(number of entries) on every call.
        count = 0
        for _, _, files in os.walk(self._dir):
            count += len(files)
        return count
    _num_entries = property(_get_num_entries)

    def clear(self):
        """Remove the entire cache directory tree, failing silently."""
        try:
            shutil.rmtree(self._dir)
        except (IOError, OSError):
            pass
+
+# For backwards compatibility
class CacheClass(FileBasedCache):
    """Deprecated alias kept for backwards compatibility (legacy URI
    notation resolves backends through the module's CacheClass name)."""
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/locmem.py b/lib/python2.7/site-packages/django/core/cache/backends/locmem.py
new file mode 100644
index 0000000..1fa1705
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/locmem.py
@@ -0,0 +1,140 @@
+"Thread-safe in-memory cache backend."
+
+import time
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.utils.synch import RWLock
+
+# Global in-memory store of cache data. Keyed by name, to provide
+# multiple named local memory caches.
+_caches = {}
+_expire_info = {}
+_locks = {}
+
class LocMemCache(BaseCache):
    """
    Thread-safe in-process cache.

    Values are stored pickled in module-level dicts shared by all
    instances created with the same ``name``; an RWLock guards access.
    In ``_expire_info`` a value of None means the entry never expires.
    """
    def __init__(self, name, params):
        BaseCache.__init__(self, params)
        global _caches, _expire_info, _locks
        self._cache = _caches.setdefault(name, {})
        self._expire_info = _expire_info.setdefault(name, {})
        self._lock = _locks.setdefault(name, RWLock())

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store only if the key is absent or expired; returns True if stored."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            # Bug fix: use a -1 "absent" sentinel so that a live entry
            # stored with timeout=None (expiry None) is no longer
            # mistaken for a missing one and overwritten.
            exp = self._expire_info.get(key, -1)
            if exp is not None and exp <= time.time():
                try:
                    pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
                    self._set(key, pickled, timeout)
                    return True
                except pickle.PickleError:
                    pass
        return False

    def get(self, key, default=None, version=None):
        """Return the cached value, or ``default`` on miss or expiry."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.reader():
            # 0 sentinel: a missing key reads as already expired.
            exp = self._expire_info.get(key, 0)
            if exp is None or exp > time.time():
                try:
                    pickled = self._cache[key]
                    return pickle.loads(pickled)
                except pickle.PickleError:
                    return default
        # Expired: drop the entry under the writer lock.
        with self._lock.writer():
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default

    def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
        # Caller must hold the writer lock.
        if len(self._cache) >= self._max_entries:
            self._cull()
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        expiry = None if timeout is None else time.time() + timeout
        self._cache[key] = value
        self._expire_info[key] = expiry

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value``, failing silently if it
        cannot be pickled."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            try:
                pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
                self._set(key, pickled, timeout)
            except pickle.PickleError:
                pass

    def incr(self, key, delta=1, version=None):
        """Add ``delta`` to the stored value; raises ValueError on a miss.

        The read happens outside the writer lock, so concurrent
        increments may race (matches the base implementation).
        """
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        key = self.make_key(key, version=version)
        with self._lock.writer():
            try:
                pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
                self._cache[key] = pickled
            except pickle.PickleError:
                pass
        return new_value

    def has_key(self, key, version=None):
        """Return True if the key exists and has not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.reader():
            if key not in self._cache:
                return False
            # Bug fix: a None expiry marks a live, non-expiring entry;
            # the old code returned False for it.
            exp = self._expire_info.get(key)
            if exp is None or exp > time.time():
                return True

        # Expired: drop the entry under the writer lock.
        with self._lock.writer():
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return False

    def _cull(self):
        # Caller must hold the writer lock (except via clear()).
        if self._cull_frequency == 0:
            self.clear()
        else:
            doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
            for k in doomed:
                self._delete(k)

    def _delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            del self._expire_info[key]
        except KeyError:
            pass

    def delete(self, key, version=None):
        """Delete the key, failing silently."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            self._delete(key)

    def clear(self):
        """Drop every entry.

        NOTE(review): runs without taking the writer lock -- confirm
        callers are serialized or accept the race.
        """
        self._cache.clear()
        self._expire_info.clear()
+
+# For backwards compatibility
class CacheClass(LocMemCache):
    """Deprecated alias kept for backwards compatibility (legacy URI
    notation resolves backends through the module's CacheClass name)."""
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/memcached.py b/lib/python2.7/site-packages/django/core/cache/backends/memcached.py
new file mode 100644
index 0000000..19e8b02
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/memcached.py
@@ -0,0 +1,190 @@
+"Memcached cache backend"
+
+import time
+import pickle
+from threading import local
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+
+from django.utils import six
+from django.utils.encoding import force_str
+
class BaseMemcachedCache(BaseCache):
    """
    Base class for memcached backends. Subclasses provide the client
    ``library`` (python-memcached or pylibmc) and the exception that
    library raises when a key is not found.
    """

    def __init__(self, server, params, library, value_not_found_exception):
        super(BaseMemcachedCache, self).__init__(params)
        # ``server`` may be a single "host:port;host:port" string or an
        # already-split sequence of server addresses.
        if isinstance(server, six.string_types):
            self._servers = server.split(';')
        else:
            self._servers = server

        # The exception type to catch from the underlying library for a key
        # that was not found. This is a ValueError for python-memcache,
        # pylibmc.NotFound for pylibmc, and cmemcache will return None without
        # raising an exception.
        self.LibraryValueNotFoundException = value_not_found_exception

        self._lib = library
        self._options = params.get('OPTIONS', None)

    @property
    def _cache(self):
        """
        Implements transparent thread-safe access to a memcached client.
        """
        if getattr(self, '_client', None) is None:
            self._client = self._lib.Client(self._servers)

        return self._client

    def _get_memcache_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Memcached deals with long (> 30 days) timeouts in a special
        way. Call this function to obtain a safe value for your timeout.
        """
        if timeout == DEFAULT_TIMEOUT:
            return self.default_timeout

        if timeout is None:
            # Using 0 in memcache sets a non-expiring timeout.
            return 0
        elif int(timeout) == 0:
            # Other cache backends treat 0 as set-and-expire. To achieve this
            # in memcache backends, a negative timeout must be passed.
            timeout = -1

        if timeout > 2592000:  # 60*60*24*30, 30 days
            # See http://code.google.com/p/memcached/wiki/FAQ
            # "You can set expire times up to 30 days in the future. After that
            # memcached interprets it as a date, and will expire the item after
            # said date. This is a simple (but obscure) mechanic."
            #
            # This means that we have to switch to absolute timestamps.
            timeout += int(time.time())
        return int(timeout)

    def make_key(self, key, version=None):
        # Python 2 memcache requires the key to be a byte string.
        return force_str(super(BaseMemcachedCache, self).make_key(key, version))

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store ``value`` only if ``key`` is not already set; returns the
        library's success flag."""
        key = self.make_key(key, version=version)
        return self._cache.add(key, value, self._get_memcache_timeout(timeout))

    def get(self, key, default=None, version=None):
        """Return the cached value for ``key``, or ``default`` on a miss."""
        key = self.make_key(key, version=version)
        val = self._cache.get(key)
        if val is None:
            return default
        return val

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        self._cache.set(key, value, self._get_memcache_timeout(timeout))

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self._cache.delete(key)

    def get_many(self, keys, version=None):
        """
        Fetch several keys in a single round trip. The result dict is keyed
        by the caller's original keys, not the versioned/encoded ones.
        """
        new_keys = [self.make_key(x, version=version) for x in keys]
        ret = self._cache.get_multi(new_keys)
        if ret:
            # get_multi reports hits under the transformed keys; translate
            # them back to the keys the caller passed in.
            reverse_map = dict(zip(new_keys, keys))
            ret = dict((reverse_map[k], v) for k, v in ret.items())
        return ret

    def close(self, **kwargs):
        self._cache.disconnect_all()

    def incr(self, key, delta=1, version=None):
        """Atomically add ``delta``; raises ValueError if the key is missing."""
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.decr(key, -delta)
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def decr(self, key, delta=1, version=None):
        """Atomically subtract ``delta``; raises ValueError if the key is missing."""
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.incr(key, -delta)
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to decr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """Store several key/value pairs, versioning each key first."""
        safe_data = {}
        for key, value in data.items():
            safe_data[self.make_key(key, version=version)] = value
        self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))

    def delete_many(self, keys, version=None):
        self._cache.delete_multi(
            [self.make_key(key, version=version) for key in keys])

    def clear(self):
        self._cache.flush_all()
+
class MemcachedCache(BaseMemcachedCache):
    """Cache binding backed by the python-memcached library."""

    def __init__(self, server, params):
        import memcache
        super(MemcachedCache, self).__init__(
            server, params,
            library=memcache,
            value_not_found_exception=ValueError)

    @property
    def _cache(self):
        # Lazily create a single shared client, pinning the pickle protocol
        # used for stored values.
        client = getattr(self, '_client', None)
        if client is None:
            client = self._lib.Client(
                self._servers, pickleProtocol=pickle.HIGHEST_PROTOCOL)
            self._client = client
        return client
+
class PyLibMCCache(BaseMemcachedCache):
    """Cache binding backed by the pylibmc library."""

    def __init__(self, server, params):
        import pylibmc
        # Clients are kept per-thread: some versions of pylibmc don't
        # play well with the GIL when a client is shared across threads.
        self._local = local()
        super(PyLibMCCache, self).__init__(
            server, params,
            library=pylibmc,
            value_not_found_exception=pylibmc.NotFound)

    @property
    def _cache(self):
        client = getattr(self._local, 'client', None)
        if not client:
            client = self._lib.Client(self._servers)
            # pylibmc exposes the cache OPTIONS as its 'behaviors' attribute.
            if self._options:
                client.behaviors = self._options
            self._local.client = client
        return client
diff --git a/lib/python2.7/site-packages/django/core/cache/utils.py b/lib/python2.7/site-packages/django/core/cache/utils.py
new file mode 100644
index 0000000..4310825
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/utils.py
@@ -0,0 +1,15 @@
+from __future__ import absolute_import, unicode_literals
+
+import hashlib
+from django.utils.encoding import force_bytes
+from django.utils.http import urlquote
+
+TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
+
+
def make_template_fragment_key(fragment_name, vary_on=None):
    """
    Return the cache key for a ``{% cache %}`` template fragment.

    The key combines the fragment name with an MD5 digest of the
    colon-joined, URL-quoted ``vary_on`` values.
    """
    if vary_on is None:
        vary_on = ()
    joined = ':'.join(urlquote(var) for var in vary_on)
    digest = hashlib.md5(force_bytes(joined))
    return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, digest.hexdigest())