summaryrefslogtreecommitdiff
path: root/lib/python2.7/site-packages/django/core
diff options
context:
space:
mode:
author: ttt 2017-05-13 00:29:47 +0530
committer: ttt 2017-05-13 00:29:47 +0530
commit: abf599be33b383a6a5baf9493093b2126a622ac8 (patch)
tree: 4c5ab6e0d935d5e65fabcf0258e4a00dd20a5afa /lib/python2.7/site-packages/django/core
download: SBHS-2018-Rpi-abf599be33b383a6a5baf9493093b2126a622ac8.tar.gz
          SBHS-2018-Rpi-abf599be33b383a6a5baf9493093b2126a622ac8.tar.bz2
          SBHS-2018-Rpi-abf599be33b383a6a5baf9493093b2126a622ac8.zip
added all server files
Diffstat (limited to 'lib/python2.7/site-packages/django/core')
-rw-r--r--lib/python2.7/site-packages/django/core/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/cache/__init__.py138
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/base.py235
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/db.py205
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/dummy.py46
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/filebased.py160
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/locmem.py140
-rw-r--r--lib/python2.7/site-packages/django/core/cache/backends/memcached.py190
-rw-r--r--lib/python2.7/site-packages/django/core/cache/utils.py15
-rw-r--r--lib/python2.7/site-packages/django/core/checks/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/checks/compatibility/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/checks/compatibility/base.py39
-rw-r--r--lib/python2.7/site-packages/django/core/checks/compatibility/django_1_6_0.py62
-rw-r--r--lib/python2.7/site-packages/django/core/context_processors.py75
-rw-r--r--lib/python2.7/site-packages/django/core/exceptions.py140
-rw-r--r--lib/python2.7/site-packages/django/core/files/__init__.py1
-rw-r--r--lib/python2.7/site-packages/django/core/files/base.py158
-rw-r--r--lib/python2.7/site-packages/django/core/files/images.py74
-rw-r--r--lib/python2.7/site-packages/django/core/files/locks.py69
-rw-r--r--lib/python2.7/site-packages/django/core/files/move.py89
-rw-r--r--lib/python2.7/site-packages/django/core/files/storage.py288
-rw-r--r--lib/python2.7/site-packages/django/core/files/temp.py65
-rw-r--r--lib/python2.7/site-packages/django/core/files/uploadedfile.py125
-rw-r--r--lib/python2.7/site-packages/django/core/files/uploadhandler.py203
-rw-r--r--lib/python2.7/site-packages/django/core/files/utils.py29
-rw-r--r--lib/python2.7/site-packages/django/core/handlers/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/handlers/base.py290
-rw-r--r--lib/python2.7/site-packages/django/core/handlers/wsgi.py215
-rw-r--r--lib/python2.7/site-packages/django/core/mail/__init__.py99
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/__init__.py1
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/base.py39
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/console.py41
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/dummy.py9
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/filebased.py65
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/locmem.py26
-rw-r--r--lib/python2.7/site-packages/django/core/mail/backends/smtp.py115
-rw-r--r--lib/python2.7/site-packages/django/core/mail/message.py392
-rw-r--r--lib/python2.7/site-packages/django/core/mail/utils.py19
-rw-r--r--lib/python2.7/site-packages/django/core/management/__init__.py399
-rw-r--r--lib/python2.7/site-packages/django/core/management/base.py422
-rw-r--r--lib/python2.7/site-packages/django/core/management/color.py50
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/check.py14
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/cleanup.py11
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/compilemessages.py71
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/createcachetable.py65
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/dbshell.py28
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/diffsettings.py40
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/dumpdata.py214
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/flush.py101
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/inspectdb.py237
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/loaddata.py282
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/makemessages.py422
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/runfcgi.py20
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/runserver.py149
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/shell.py113
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sql.py21
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlall.py22
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlclear.py21
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlcustom.py21
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqldropindexes.py23
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlflush.py21
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlindexes.py22
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlinitialdata.py7
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/sqlsequencereset.py22
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/startapp.py24
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/startproject.py30
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/syncdb.py162
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/test.py91
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/testserver.py45
-rw-r--r--lib/python2.7/site-packages/django/core/management/commands/validate.py10
-rw-r--r--lib/python2.7/site-packages/django/core/management/sql.py216
-rw-r--r--lib/python2.7/site-packages/django/core/management/templates.py325
-rw-r--r--lib/python2.7/site-packages/django/core/management/utils.py79
-rw-r--r--lib/python2.7/site-packages/django/core/management/validation.py372
-rw-r--r--lib/python2.7/site-packages/django/core/paginator.py161
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/__init__.py148
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/base.py171
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/json.py108
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/python.py154
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/pyyaml.py75
-rw-r--r--lib/python2.7/site-packages/django/core/serializers/xml_serializer.py391
-rw-r--r--lib/python2.7/site-packages/django/core/servers/__init__.py0
-rw-r--r--lib/python2.7/site-packages/django/core/servers/basehttp.py169
-rw-r--r--lib/python2.7/site-packages/django/core/servers/fastcgi.py185
-rw-r--r--lib/python2.7/site-packages/django/core/signals.py5
-rw-r--r--lib/python2.7/site-packages/django/core/signing.py199
-rw-r--r--lib/python2.7/site-packages/django/core/urlresolvers.py595
-rw-r--r--lib/python2.7/site-packages/django/core/validators.py221
-rw-r--r--lib/python2.7/site-packages/django/core/wsgi.py13
91 files changed, 10624 insertions, 0 deletions
diff --git a/lib/python2.7/site-packages/django/core/__init__.py b/lib/python2.7/site-packages/django/core/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/cache/__init__.py b/lib/python2.7/site-packages/django/core/cache/__init__.py
new file mode 100644
index 0000000..ea3a68f
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/__init__.py
@@ -0,0 +1,138 @@
+"""
+Caching framework.
+
+This package defines set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/topics/cache.txt for information on the public API.
+"""
+
+from django.conf import settings
+from django.core import signals
+from django.core.cache.backends.base import (
+ InvalidCacheBackendError, CacheKeyWarning, BaseCache)
+from django.core.exceptions import ImproperlyConfigured
+from django.utils import importlib
+from django.utils.module_loading import import_by_path
+from django.utils.six.moves.urllib.parse import parse_qsl
+
+
# Public API of this module.
__all__ = [
    'get_cache', 'cache', 'DEFAULT_CACHE_ALIAS'
]

# Name for use in settings file --> name of module in "backends" directory.
# Any backend scheme that is not in this dictionary is treated as a Python
# import path to a custom backend.
BACKENDS = {
    'memcached': 'memcached',
    'locmem': 'locmem',
    'file': 'filebased',
    'db': 'db',
    'dummy': 'dummy',
}

# Alias that settings.CACHES must always define (enforced at import time below).
DEFAULT_CACHE_ALIAS = 'default'
+
def parse_backend_uri(backend_uri):
    """
    Split a legacy cache URI of the form ``scheme://host?param=value``
    into its components.

    Returns a ``(scheme, host, params)`` tuple where ``params`` is a dict
    built from the query string. Raises InvalidCacheBackendError when the
    URI lacks the ``scheme://`` prefix.
    """
    if ':' not in backend_uri:
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    scheme, _, rest = backend_uri.partition(':')
    if not rest.startswith('//'):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")

    # Everything after '//' is the host, optionally followed by '?query'.
    qpos = rest.find('?')
    if qpos == -1:
        host, params = rest[2:], {}
    else:
        host = rest[2:qpos]
        params = dict(parse_qsl(rest[qpos + 1:]))
    # A single trailing slash on the host is ignored.
    if host.endswith('/'):
        host = host[:-1]

    return scheme, host, params
+
# Fail fast at import time if the mandatory 'default' cache is not configured.
if DEFAULT_CACHE_ALIAS not in settings.CACHES:
    raise ImproperlyConfigured("You must define a '%s' cache" % DEFAULT_CACHE_ALIAS)
+
def parse_backend_conf(backend, **kwargs):
    """
    Resolve a cache backend configured without the URI notation.

    ``backend`` may be a settings.CACHES alias or a dotted import path;
    keyword arguments override the configured values. Returns a
    ``(backend_path, location, params)`` tuple. Raises
    InvalidCacheBackendError when a dotted path cannot be imported.
    """
    conf = settings.CACHES.get(backend, None)
    if conf is None:
        # Not a CACHES alias -- require that it is at least an importable
        # dotted path before trusting it.
        try:
            import_by_path(backend)
        except ImproperlyConfigured as e:
            raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
                backend, e))
        return backend, kwargs.pop('LOCATION', ''), kwargs
    # Alias found: merge overrides on top of the configured entry.
    args = conf.copy()
    args.update(kwargs)
    return args.pop('BACKEND'), args.pop('LOCATION', ''), args
+
def get_cache(backend, **kwargs):
    """
    Load a cache backend dynamically. Three call styles are supported:

    * a settings alias::

        cache = get_cache('default')

    * a dotted import path plus arbitrary options::

        cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', **{
            'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 30,
        })

    * the legacy URI notation::

        cache = get_cache('locmem://')

    Raises InvalidCacheBackendError when the backend cannot be resolved.
    """
    try:
        if '://' not in backend:
            # Alias or dotted path defined through settings.CACHES.
            backend, location, params = parse_backend_conf(backend, **kwargs)
            backend_cls = import_by_path(backend)
        else:
            # Legacy URI notation, kept for backwards compatibility.
            backend, location, params = parse_backend_uri(backend)
            if backend in BACKENDS:
                backend = 'django.core.cache.backends.%s' % BACKENDS[backend]
            params.update(kwargs)
            backend_cls = importlib.import_module(backend).CacheClass
    except (AttributeError, ImportError, ImproperlyConfigured) as e:
        raise InvalidCacheBackendError(
            "Could not find backend '%s': %s" % (backend, e))
    cache = backend_cls(location, params)
    # Some caches -- python-memcached in particular -- need to do a cleanup
    # at the end of a request cycle. If not implemented in a particular
    # backend, cache.close is a no-op.
    signals.request_finished.connect(cache.close)
    return cache
+
+cache = get_cache(DEFAULT_CACHE_ALIAS)
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/__init__.py b/lib/python2.7/site-packages/django/core/cache/backends/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/base.py b/lib/python2.7/site-packages/django/core/cache/backends/base.py
new file mode 100644
index 0000000..deb98e7
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/base.py
@@ -0,0 +1,235 @@
+"Base Cache class."
+from __future__ import unicode_literals
+
+import warnings
+
+from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning
+from django.utils.module_loading import import_by_path
+
+
class InvalidCacheBackendError(ImproperlyConfigured):
    # Raised when a cache backend cannot be located or imported.
    pass


class CacheKeyWarning(DjangoRuntimeWarning):
    # Issued by validate_key() for keys that would break on memcached.
    pass


# Stub class to ensure not passing in a `timeout` argument results in
# the default timeout
DEFAULT_TIMEOUT = object()

# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250
+
+
def default_key_func(key, key_prefix, version):
    """
    Build the final cache key as ``<key_prefix>:<version>:<key>``.

    This is the stock key function; a KEY_FUNCTION setting may substitute
    any callable with the same ``(key, key_prefix, version)`` signature.
    """
    parts = (key_prefix, version, key)
    return '%s:%s:%s' % parts
+
+
def get_key_func(key_func):
    """
    Resolve the key function for a cache instance.

    ``key_func`` may be a callable (used as-is), a dotted import path
    (imported and used), or None (falls back to ``default_key_func``).
    """
    if key_func is None:
        return default_key_func
    if callable(key_func):
        return key_func
    return import_by_path(key_func)
+
+
class BaseCache(object):
    """
    Abstract base class for all cache backends.

    Subclasses must implement add(), get(), set(), delete() and clear();
    the bulk operations (get_many/set_many/delete_many), the arithmetic
    helpers (incr/decr) and the key-versioning helpers are implemented
    here in terms of those primitives.
    """

    def __init__(self, params):
        # Each knob accepts either the legacy lowercase key (from the old
        # URI query-string notation) or the uppercase settings.CACHES key,
        # and silently falls back to a default on non-integer input.
        timeout = params.get('timeout', params.get('TIMEOUT', 300))
        try:
            timeout = int(timeout)
        except (ValueError, TypeError):
            timeout = 300
        self.default_timeout = timeout

        options = params.get('OPTIONS', {})
        max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self.key_prefix = params.get('KEY_PREFIX', '')
        self.version = params.get('VERSION', 1)
        self.key_func = get_key_func(params.get('KEY_FUNCTION', None))

    def make_key(self, key, version=None):
        """Constructs the key used by all other methods. By default it
        uses the key_func to generate a key (which, by default,
        prepends the `key_prefix' and 'version'). A different key
        function can be provided at the time of cache construction;
        alternatively, you can subclass the cache backend to provide
        custom key making behavior.
        """
        if version is None:
            version = self.version

        new_key = self.key_func(key, self.key_prefix, version)
        return new_key

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache if the key does not already exist. If
        timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.

        Returns True if the value was stored, False otherwise.
        """
        raise NotImplementedError

    def get(self, key, default=None, version=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        raise NotImplementedError

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache. If timeout is given, that timeout will be
        used for the key; otherwise the default cache timeout will be used.
        """
        raise NotImplementedError

    def delete(self, key, version=None):
        """
        Delete a key from the cache, failing silently.
        """
        raise NotImplementedError

    def get_many(self, keys, version=None):
        """
        Fetch a bunch of keys from the cache. For certain backends (memcached,
        pgsql) this can be *much* faster when fetching multiple values.

        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.

        Note: a stored value of None is indistinguishable from a missing
        key here, because get() returning None is treated as a miss.
        """
        d = {}
        for k in keys:
            val = self.get(k, version=version)
            if val is not None:
                d[k] = val
        return d

    def has_key(self, key, version=None):
        """
        Returns True if the key is in the cache and has not expired.

        Note: a key whose stored value is None is reported as missing.
        """
        return self.get(key, version=version) is not None

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache. If the key does not exist, raise a
        ValueError exception.

        Note: this default implementation is a read-modify-write (separate
        get() and set() calls), so it is not atomic.
        """
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        self.set(key, new_value, version=version)
        return new_value

    def decr(self, key, delta=1, version=None):
        """
        Subtract delta from value in the cache. If the key does not exist, raise
        a ValueError exception.
        """
        return self.incr(key, -delta, version=version)

    def __contains__(self, key):
        """
        Returns True if the key is in the cache and has not expired.
        """
        # This is a separate method, rather than just a copy of has_key(),
        # so that it always has the same functionality as has_key(), even
        # if a subclass overrides it.
        return self.has_key(key)

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs. For certain backends (memcached), this is much more efficient
        than calling set() multiple times.

        If timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.
        """
        for key, value in data.items():
            self.set(key, value, timeout=timeout, version=version)

    def delete_many(self, keys, version=None):
        """
        Delete a bunch of values in the cache at once. For certain backends
        (memcached), this is much more efficient than calling delete() multiple
        times.
        """
        for key in keys:
            self.delete(key, version=version)

    def clear(self):
        """Remove *all* values from the cache at once."""
        raise NotImplementedError

    def validate_key(self, key):
        """
        Warn about keys that would not be portable to the memcached
        backend. This encourages (but does not force) writing backend-portable
        cache code.

        """
        if len(key) > MEMCACHE_MAX_KEY_LENGTH:
            warnings.warn('Cache key will cause errors if used with memcached: '
                    '%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH),
                    CacheKeyWarning)
        for char in key:
            # Memcached rejects whitespace/control characters (ord < 33)
            # and DEL (127).
            if ord(char) < 33 or ord(char) == 127:
                warnings.warn('Cache key contains characters that will cause '
                        'errors if used with memcached: %r' % key,
                        CacheKeyWarning)

    def incr_version(self, key, delta=1, version=None):
        """Adds delta to the cache version for the supplied key. Returns the
        new version.

        The value is copied to the new versioned key and the old key is
        deleted. Raises ValueError if the key does not exist.
        """
        if version is None:
            version = self.version

        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)

        self.set(key, value, version=version+delta)
        self.delete(key, version=version)
        return version+delta

    def decr_version(self, key, delta=1, version=None):
        """Subtracts delta from the cache version for the supplied key. Returns
        the new version.
        """
        return self.incr_version(key, -delta, version)

    def close(self, **kwargs):
        """Close the cache connection"""
        pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/db.py b/lib/python2.7/site-packages/django/core/cache/backends/db.py
new file mode 100644
index 0000000..5c9d37b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/db.py
@@ -0,0 +1,205 @@
+"Database cache backend."
+import base64
+import time
+from datetime import datetime
+
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.conf import settings
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.db import connections, transaction, router, DatabaseError
+from django.db.backends.util import typecast_timestamp
+from django.utils import timezone, six
+from django.utils.encoding import force_bytes
+
+
class Options(object):
    """
    Duck-types a Django model ``_meta`` object for the cache table.

    Attaching an ``Options`` instance as ``_meta`` on a stand-in model
    class lets database routers control cache reads and writes the same
    way they route ordinary model operations.
    """
    def __init__(self, table):
        self.__dict__.update(
            db_table=table,
            app_label='django_cache',
            model_name='cacheentry',
            verbose_name='cache entry',
            verbose_name_plural='cache entries',
            object_name='CacheEntry',
            abstract=False,
            managed=True,
            proxy=False,
        )
+
class BaseDatabaseCache(BaseCache):
    """
    Shared plumbing for database-backed caches: remembers the table name
    and builds a stand-in model class whose _meta quacks like a real
    model's, so database routers can route cache operations.
    """
    def __init__(self, table, params):
        BaseCache.__init__(self, params)
        self._table = table

        class CacheEntry(object):
            # Fake model handed to router.db_for_read/db_for_write.
            _meta = Options(table)
        self.cache_model_class = CacheEntry
+
class DatabaseCache(BaseDatabaseCache):
    """
    Cache backend storing entries as rows (cache_key, value, expires) in
    a database table. Values are stored as base64-encoded pickles.
    """

    # This class uses cursors provided by the database connection. This means
    # it reads expiration values as aware or naive datetimes depending on the
    # value of USE_TZ. They must be compared to aware or naive representations
    # of "now" respectively.

    # But it bypasses the ORM for write operations. As a consequence, aware
    # datetimes aren't made naive for databases that don't support time zones.
    # We work around this problem by always using naive datetimes when writing
    # expiration values, in UTC when USE_TZ = True and in local time otherwise.

    def get(self, key, default=None, version=None):
        """
        Return the cached value for ``key`` or ``default`` on a miss.
        An expired row is deleted on access (lazy expiry) and treated as
        a miss.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT cache_key, value, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [key])
        row = cursor.fetchone()
        if row is None:
            return default
        now = timezone.now()
        expires = row[2]
        if connections[db].features.needs_datetime_string_cast and not isinstance(expires, datetime):
            # Note: typecasting is needed by some 3rd party database backends.
            # All core backends work without typecasting, so be careful about
            # changes here - test suite will NOT pick regressions here.
            expires = typecast_timestamp(str(expires))
        if expires < now:
            # Stale row: purge it through the write connection, then miss.
            db = router.db_for_write(self.cache_model_class)
            cursor = connections[db].cursor()
            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [key])
            return default
        value = connections[db].ops.process_clob(row[1])
        # Stored as a base64-encoded pickle; see _base_set().
        return pickle.loads(base64.b64decode(force_bytes(value)))

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store ``value`` unconditionally (insert or overwrite)."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._base_set('set', key, value, timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Store ``value`` only if the key is absent or expired. Returns
        True if the value was stored, False otherwise.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._base_set('add', key, value, timeout)

    def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        """
        Shared implementation of set() and add().

        Culls the table when it exceeds _max_entries, then updates or
        inserts the row inside a transaction. Returns True on success,
        False when the write failed (failures stay silent so concurrent
        writers don't crash each other).
        """
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT COUNT(*) FROM %s" % table)
        num = cursor.fetchone()[0]
        now = timezone.now()
        now = now.replace(microsecond=0)
        # Expiry is always written as a naive datetime; see class note.
        if timeout is None:
            exp = datetime.max
        elif settings.USE_TZ:
            exp = datetime.utcfromtimestamp(time.time() + timeout)
        else:
            exp = datetime.fromtimestamp(time.time() + timeout)
        exp = exp.replace(microsecond=0)
        if num > self._max_entries:
            self._cull(db, cursor, now)
        pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
        b64encoded = base64.b64encode(pickled)
        # The DB column is expecting a string, so make sure the value is a
        # string, not bytes. Refs #19274.
        if six.PY3:
            b64encoded = b64encoded.decode('latin1')
        try:
            # Note: typecasting for datetimes is needed by some 3rd party
            # database backends. All core backends work without typecasting,
            # so be careful about changes here - test suite will NOT pick
            # regressions.
            with transaction.atomic(using=db):
                cursor.execute("SELECT cache_key, expires FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
                result = cursor.fetchone()
                if result:
                    current_expires = result[1]
                    if (connections[db].features.needs_datetime_string_cast and not
                            isinstance(current_expires, datetime)):
                        current_expires = typecast_timestamp(str(current_expires))
                exp = connections[db].ops.value_to_db_datetime(exp)
                # 'set' always overwrites; 'add' only replaces expired rows.
                if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                    cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                   "WHERE cache_key = %%s" % table,
                                   [b64encoded, exp, key])
                else:
                    cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                   "VALUES (%%s, %%s, %%s)" % table,
                                   [key, b64encoded, exp])
        except DatabaseError:
            # To be threadsafe, updates/inserts are allowed to fail silently
            return False
        else:
            return True

    def delete(self, key, version=None):
        """Delete the row for ``key``, failing silently if absent."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])

    def has_key(self, key, version=None):
        """Return True if ``key`` exists and has not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        # Compare against a naive "now" matching how expiry was written.
        if settings.USE_TZ:
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = now.replace(microsecond=0)
        cursor.execute("SELECT cache_key FROM %s "
                       "WHERE cache_key = %%s and expires > %%s" % table,
                       [key, connections[db].ops.value_to_db_datetime(now)])
        return cursor.fetchone() is not None

    def _cull(self, db, cursor, now):
        """
        Shrink the table: drop expired rows first; if still over
        _max_entries, delete roughly 1/_cull_frequency of all keys
        (everything when _cull_frequency is 0).
        """
        if self._cull_frequency == 0:
            self.clear()
        else:
            # When USE_TZ is True, 'now' will be an aware datetime in UTC.
            now = now.replace(tzinfo=None)
            table = connections[db].ops.quote_name(self._table)
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connections[db].ops.value_to_db_datetime(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cull_num = num // self._cull_frequency
                cursor.execute(
                    connections[db].ops.cache_key_culling_sql() % table,
                    [cull_num])
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key < %%s" % table,
                               [cursor.fetchone()[0]])

    def clear(self):
        """Remove every row from the cache table."""
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()
        cursor.execute('DELETE FROM %s' % table)
+
# For backwards compatibility
class CacheClass(DatabaseCache):
    # Legacy name looked up by the old URI-based get_cache() path.
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/dummy.py b/lib/python2.7/site-packages/django/core/cache/backends/dummy.py
new file mode 100644
index 0000000..7ca6114
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/dummy.py
@@ -0,0 +1,46 @@
+"Dummy cache backend"
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+
class DummyCache(BaseCache):
    """
    Cache backend that never stores anything.

    Every read misses and every write is discarded, but keys are still
    built and validated so callers exercise the same key rules as with
    a real backend.
    """
    def __init__(self, host, *args, **kwargs):
        # ``host`` is accepted for URI compatibility and ignored.
        BaseCache.__init__(self, *args, **kwargs)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self.validate_key(self.make_key(key, version=version))
        return True

    def get(self, key, default=None, version=None):
        self.validate_key(self.make_key(key, version=version))
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self.validate_key(self.make_key(key, version=version))

    def delete(self, key, version=None):
        self.validate_key(self.make_key(key, version=version))

    def get_many(self, keys, version=None):
        return {}

    def has_key(self, key, version=None):
        self.validate_key(self.make_key(key, version=version))
        return False

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        pass

    def delete_many(self, keys, version=None):
        pass

    def clear(self):
        pass
+
# For backwards compatibility
class CacheClass(DummyCache):
    # Legacy name looked up by the old URI-based get_cache() path.
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/filebased.py b/lib/python2.7/site-packages/django/core/cache/backends/filebased.py
new file mode 100644
index 0000000..d19eed4
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/filebased.py
@@ -0,0 +1,160 @@
+"File-based cache backend"
+
+import hashlib
+import os
+import shutil
+import time
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.utils.encoding import force_bytes
+
+
class FileBasedCache(BaseCache):
    """
    Cache backend storing each entry as a file on disk.

    Each file holds two consecutive pickles: the expiry timestamp (or
    None for "never expires") followed by the value itself. File names
    are derived from the md5 of the key (see _key_to_file).
    """
    def __init__(self, dir, params):
        BaseCache.__init__(self, params)
        self._dir = dir
        if not os.path.exists(self._dir):
            self._createdir()

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store only if the key is currently absent; True when stored."""
        if self.has_key(key, version=version):
            return False

        self.set(key, value, timeout, version=version)
        return True

    def get(self, key, default=None, version=None):
        """Return the cached value, or ``default`` on miss/expiry/error."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        fname = self._key_to_file(key)
        try:
            with open(fname, 'rb') as f:
                # First pickle in the file is the expiry timestamp.
                exp = pickle.load(f)
                now = time.time()
                if exp is not None and exp < now:
                    # Lazy expiry: remove the stale file and miss.
                    self._delete(fname)
                else:
                    # Second pickle is the stored value.
                    return pickle.load(f)
        except (IOError, OSError, EOFError, pickle.PickleError):
            pass
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Write expiry and value as two pickles, failing silently on I/O errors."""
        key = self.make_key(key, version=version)
        self.validate_key(key)

        fname = self._key_to_file(key)
        dirname = os.path.dirname(fname)

        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout

        self._cull()

        try:
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            with open(fname, 'wb') as f:
                # None means "never expires".
                expiry = None if timeout is None else time.time() + timeout
                pickle.dump(expiry, f, pickle.HIGHEST_PROTOCOL)
                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
        except (IOError, OSError):
            pass

    def delete(self, key, version=None):
        """Delete the file for ``key``, failing silently."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        try:
            self._delete(self._key_to_file(key))
        except (IOError, OSError):
            pass

    def _delete(self, fname):
        os.remove(fname)
        try:
            # Remove the 2 subdirs if they're empty
            dirname = os.path.dirname(fname)
            os.rmdir(dirname)
            os.rmdir(os.path.dirname(dirname))
        except (IOError, OSError):
            pass

    def has_key(self, key, version=None):
        """Return True if the key exists and has not expired."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        fname = self._key_to_file(key)
        try:
            with open(fname, 'rb') as f:
                exp = pickle.load(f)
                now = time.time()
                # NOTE(review): exp may be None ("never expires"); None < now
                # is valid on Python 2.7 (which this tree targets) but would
                # raise TypeError on Python 3 -- confirm before porting.
                if exp < now:
                    self._delete(fname)
                    return False
                else:
                    return True
        except (IOError, OSError, EOFError, pickle.PickleError):
            return False

    def _cull(self):
        """
        When at/over _max_entries, delete every _cull_frequency-th entry
        (alphabetically by top-level prefix dir), or everything when
        _cull_frequency is 0.
        """
        if int(self._num_entries) < self._max_entries:
            return

        try:
            filelist = sorted(os.listdir(self._dir))
        except (IOError, OSError):
            return

        if self._cull_frequency == 0:
            doomed = filelist
        else:
            doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]

        for topdir in doomed:
            try:
                for root, _, files in os.walk(topdir):
                    for f in files:
                        self._delete(os.path.join(root, f))
            except (IOError, OSError):
                pass

    def _createdir(self):
        """Create the cache directory, raising EnvironmentError on failure."""
        try:
            os.makedirs(self._dir)
        except OSError:
            raise EnvironmentError("Cache directory '%s' does not exist and could not be created'" % self._dir)

    def _key_to_file(self, key):
        """
        Convert a cache key into an md5-based file path. We'll turn the first
        couple bits of the path into directory prefixes to be nice to
        filesystems that have problems with large numbers of files in a
        directory.

        Thus, a cache key of "foo" gets turned into a file named
        ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
        """
        path = hashlib.md5(force_bytes(key)).hexdigest()
        path = os.path.join(path[:2], path[2:4], path[4:])
        return os.path.join(self._dir, path)

    def _get_num_entries(self):
        # Walks the whole cache dir; O(number of entries).
        count = 0
        for _,_,files in os.walk(self._dir):
            count += len(files)
        return count
    # Read-only count of entries currently on disk.
    _num_entries = property(_get_num_entries)

    def clear(self):
        """Remove the entire cache directory tree, failing silently."""
        try:
            shutil.rmtree(self._dir)
        except (IOError, OSError):
            pass
+
# For backwards compatibility
class CacheClass(FileBasedCache):
    # Legacy name looked up by the old URI-based get_cache() path.
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/locmem.py b/lib/python2.7/site-packages/django/core/cache/backends/locmem.py
new file mode 100644
index 0000000..1fa1705
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/locmem.py
@@ -0,0 +1,140 @@
+"Thread-safe in-memory cache backend."
+
+import time
+try:
+ from django.utils.six.moves import cPickle as pickle
+except ImportError:
+ import pickle
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+from django.utils.synch import RWLock
+
+# Global in-memory store of cache data. Keyed by name, to provide
+# multiple named local memory caches.
+_caches = {}
+_expire_info = {}
+_locks = {}
+
class LocMemCache(BaseCache):
    """
    In-process cache backend.  All instances created with the same ``name``
    share one pickled-value store, one expiry map and one RWLock (the
    module-level ``_caches``/``_expire_info``/``_locks`` dicts).
    """
    def __init__(self, name, params):
        BaseCache.__init__(self, params)
        global _caches, _expire_info, _locks
        # setdefault keeps sharing semantics: same name -> same storage/lock.
        self._cache = _caches.setdefault(name, {})
        self._expire_info = _expire_info.setdefault(name, {})
        self._lock = _locks.setdefault(name, RWLock())

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store ``value`` only if absent/expired; return True if stored."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            exp = self._expire_info.get(key)
            # NOTE(review): ``None`` here means either "key missing" or "key
            # present with no expiry" (_set stores None for non-expiring
            # entries), so add() overwrites an existing never-expiring key --
            # confirm that is intended.
            if exp is None or exp <= time.time():
                try:
                    pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
                    self._set(key, pickled, timeout)
                    return True
                except pickle.PickleError:
                    pass
            return False

    def get(self, key, default=None, version=None):
        """Return the unpickled value for ``key``, or ``default``."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.reader():
            # Default of 0 makes a missing key look expired (0 <= now), so it
            # falls through to the eviction path below.
            exp = self._expire_info.get(key, 0)
            if exp is None or exp > time.time():
                try:
                    pickled = self._cache[key]
                    return pickle.loads(pickled)
                except pickle.PickleError:
                    return default
        # Expired: re-acquire as a writer to evict.  The reader lock was
        # released above, so another thread may already have removed or
        # replaced the entry -- hence the KeyError guard.
        with self._lock.writer():
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default

    def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
        # Internal helper; callers must already hold the write lock.
        if len(self._cache) >= self._max_entries:
            self._cull()
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        # ``None`` expiry means "never expires".
        expiry = None if timeout is None else time.time() + timeout
        self._cache[key] = value
        self._expire_info[key] = expiry

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value``; unpicklable values are dropped."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            try:
                pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
                self._set(key, pickled, timeout)
            except pickle.PickleError:
                pass

    def incr(self, key, delta=1, version=None):
        """
        Add ``delta`` to the stored value and return the result.

        Not atomic: the read (self.get) happens outside the write lock taken
        for the store, so a concurrent writer can interleave.  Note the write
        updates ``_cache`` only, leaving the existing expiry untouched.
        """
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        key = self.make_key(key, version=version)
        with self._lock.writer():
            try:
                pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
                self._cache[key] = pickled
            except pickle.PickleError:
                pass
        return new_value

    def has_key(self, key, version=None):
        """True if ``key`` exists and has not expired (expired keys evicted)."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.reader():
            exp = self._expire_info.get(key)
            # NOTE(review): as in add(), a present-but-never-expiring key is
            # indistinguishable from a missing one here, so it reports False.
            if exp is None:
                return False
            elif exp > time.time():
                return True

        # Expired: evict under the write lock (see get() for the race note).
        with self._lock.writer():
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return False

    def _cull(self):
        # Drop every _cull_frequency-th key (or everything when frequency=0).
        if self._cull_frequency == 0:
            self.clear()
        else:
            doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
            for k in doomed:
                self._delete(k)

    def _delete(self, key):
        # Remove both the value and its expiry record; missing keys are fine.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            del self._expire_info[key]
        except KeyError:
            pass

    def delete(self, key, version=None):
        """Remove ``key`` from the cache (no-op when absent)."""
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            self._delete(key)

    def clear(self):
        """Wipe the shared store.  NOTE(review): taken without the lock."""
        self._cache.clear()
        self._expire_info.clear()
+
# For backwards compatibility
class CacheClass(LocMemCache):
    """Old name for LocMemCache, kept so legacy BACKEND paths still work."""
    pass
diff --git a/lib/python2.7/site-packages/django/core/cache/backends/memcached.py b/lib/python2.7/site-packages/django/core/cache/backends/memcached.py
new file mode 100644
index 0000000..19e8b02
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/backends/memcached.py
@@ -0,0 +1,190 @@
+"Memcached cache backend"
+
+import time
+import pickle
+from threading import local
+
+from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
+
+from django.utils import six
+from django.utils.encoding import force_str
+
class BaseMemcachedCache(BaseCache):
    """
    Shared implementation for memcached bindings.  Subclasses supply the
    client ``library`` and the library-specific "key not found" exception.
    """
    def __init__(self, server, params, library, value_not_found_exception):
        super(BaseMemcachedCache, self).__init__(params)
        # Accept either "host1;host2" or an already-split sequence.
        if isinstance(server, six.string_types):
            self._servers = server.split(';')
        else:
            self._servers = server

        # The exception type to catch from the underlying library for a key
        # that was not found. This is a ValueError for python-memcache,
        # pylibmc.NotFound for pylibmc, and cmemcache will return None without
        # raising an exception.
        self.LibraryValueNotFoundException = value_not_found_exception

        self._lib = library
        self._options = params.get('OPTIONS', None)

    @property
    def _cache(self):
        """
        Implements transparent thread-safe access to a memcached client.
        """
        # Lazily built once per backend instance; subclasses override this.
        if getattr(self, '_client', None) is None:
            self._client = self._lib.Client(self._servers)

        return self._client

    def _get_memcache_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Memcached deals with long (> 30 days) timeouts in a special
        way. Call this function to obtain a safe value for your timeout.
        """
        # Order matters: the DEFAULT_TIMEOUT sentinel must be resolved before
        # the None / 0 special cases below.
        if timeout == DEFAULT_TIMEOUT:
            return self.default_timeout

        if timeout is None:
            # Using 0 in memcache sets a non-expiring timeout.
            return 0
        elif int(timeout) == 0:
            # Other cache backends treat 0 as set-and-expire. To achieve this
            # in memcache backends, a negative timeout must be passed.
            timeout = -1

        if timeout > 2592000: # 60*60*24*30, 30 days
            # See http://code.google.com/p/memcached/wiki/FAQ
            # "You can set expire times up to 30 days in the future. After that
            # memcached interprets it as a date, and will expire the item after
            # said date. This is a simple (but obscure) mechanic."
            #
            # This means that we have to switch to absolute timestamps.
            timeout += int(time.time())
        return int(timeout)

    def make_key(self, key, version=None):
        # Python 2 memcache requires the key to be a byte string.
        return force_str(super(BaseMemcachedCache, self).make_key(key, version))

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Store only if the key is absent; returns the library's result."""
        key = self.make_key(key, version=version)
        return self._cache.add(key, value, self._get_memcache_timeout(timeout))

    def get(self, key, default=None, version=None):
        """Fetch a value, substituting ``default`` for a miss."""
        key = self.make_key(key, version=version)
        val = self._cache.get(key)
        if val is None:
            return default
        return val

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Unconditionally store ``value`` with a memcached-safe timeout."""
        key = self.make_key(key, version=version)
        self._cache.set(key, value, self._get_memcache_timeout(timeout))

    def delete(self, key, version=None):
        """Remove ``key`` from memcached."""
        key = self.make_key(key, version=version)
        self._cache.delete(key)

    def get_many(self, keys, version=None):
        """
        Fetch several keys in one round trip, translating the versioned
        wire keys in the reply back to the caller's original keys.
        """
        new_keys = [self.make_key(x, version=version) for x in keys]
        ret = self._cache.get_multi(new_keys)
        if ret:
            _ = {}
            m = dict(zip(new_keys, keys))
            for k, v in ret.items():
                _[m[k]] = v
            ret = _
        return ret

    def close(self, **kwargs):
        # Drop all server connections held by the client.
        self._cache.disconnect_all()

    def incr(self, key, delta=1, version=None):
        """Atomically add ``delta``; raises ValueError for a missing key."""
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.decr(key, -delta)
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def decr(self, key, delta=1, version=None):
        """Atomically subtract ``delta``; raises ValueError for a missing key."""
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.incr(key, -delta)
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to decr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """Store several key/value pairs in one ``set_multi`` round trip."""
        safe_data = {}
        for key, value in data.items():
            key = self.make_key(key, version=version)
            safe_data[key] = value
        self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))

    def delete_many(self, keys, version=None):
        """Remove several keys in one ``delete_multi`` round trip."""
        l = lambda x: self.make_key(x, version=version)
        self._cache.delete_multi(map(l, keys))

    def clear(self):
        """Flush every entry on every configured server."""
        self._cache.flush_all()
+
class MemcachedCache(BaseMemcachedCache):
    "An implementation of a cache binding using python-memcached"
    def __init__(self, server, params):
        # Imported lazily so the module loads even without python-memcached.
        import memcache
        super(MemcachedCache, self).__init__(server, params,
                                             library=memcache,
                                             value_not_found_exception=ValueError)

    @property
    def _cache(self):
        # Same lazy singleton as the base class, but pins the pickle protocol
        # used by python-memcached for stored values.
        if getattr(self, '_client', None) is None:
            self._client = self._lib.Client(self._servers, pickleProtocol=pickle.HIGHEST_PROTOCOL)
        return self._client
+
class PyLibMCCache(BaseMemcachedCache):
    "An implementation of a cache binding using pylibmc"
    def __init__(self, server, params):
        # Imported lazily so the module loads even without pylibmc.
        import pylibmc
        self._local = local()
        super(PyLibMCCache, self).__init__(server, params,
                                           library=pylibmc,
                                           value_not_found_exception=pylibmc.NotFound)

    @property
    def _cache(self):
        # PylibMC uses cache options as the 'behaviors' attribute.
        # It also needs to use threadlocals, because some versions of
        # PylibMC don't play well with the GIL.
        client = getattr(self._local, 'client', None)
        if client:
            return client

        client = self._lib.Client(self._servers)
        if self._options:
            client.behaviors = self._options

        self._local.client = client

        return client
diff --git a/lib/python2.7/site-packages/django/core/cache/utils.py b/lib/python2.7/site-packages/django/core/cache/utils.py
new file mode 100644
index 0000000..4310825
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/cache/utils.py
@@ -0,0 +1,15 @@
+from __future__ import absolute_import, unicode_literals
+
+import hashlib
+from django.utils.encoding import force_bytes
+from django.utils.http import urlquote
+
+TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
+
+
def make_template_fragment_key(fragment_name, vary_on=None):
    """
    Build the cache key for a named, cached template fragment.

    ``vary_on`` is an optional iterable of values the fragment varies on;
    each is URL-quoted, the results are joined with ':' and md5-hashed so
    the final key stays short and backend-safe.
    """
    if vary_on is None:
        vary_on = ()
    digest = hashlib.md5(force_bytes(':'.join(urlquote(var) for var in vary_on)))
    return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, digest.hexdigest())
diff --git a/lib/python2.7/site-packages/django/core/checks/__init__.py b/lib/python2.7/site-packages/django/core/checks/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/checks/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/checks/compatibility/__init__.py b/lib/python2.7/site-packages/django/core/checks/compatibility/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/checks/compatibility/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/checks/compatibility/base.py b/lib/python2.7/site-packages/django/core/checks/compatibility/base.py
new file mode 100644
index 0000000..7fe52d2
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/checks/compatibility/base.py
@@ -0,0 +1,39 @@
+from __future__ import unicode_literals
+import warnings
+
+from django.core.checks.compatibility import django_1_6_0
+
+
# Per-release compatibility-check modules; each is expected to expose a
# ``run_checks()`` callable (see check_compatibility in this module).
COMPAT_CHECKS = [
    # Add new modules at the top, so we keep things in descending order.
    # After two-three minor releases, old versions should get dropped.
    django_1_6_0,
]
+
+
def check_compatibility():
    """
    Runs through compatibility checks to warn the user with an existing install
    about changes in an up-to-date Django.

    Modules should be located in ``django.core.compat_checks`` (typically one
    per release of Django) & must have a ``run_checks`` function that runs
    all the checks.

    Returns a list of informational messages about incompatibilities.
    """
    messages = []

    for check_module in COMPAT_CHECKS:
        run = getattr(check_module, 'run_checks', None)

        if run is not None:
            messages.extend(run())
        else:
            # Misconfigured check module: warn, but keep checking the rest.
            warnings.warn(
                "The '%s' module lacks a " % check_module.__name__ +
                "'run_checks' method, which is needed to verify compatibility."
            )

    return messages
diff --git a/lib/python2.7/site-packages/django/core/checks/compatibility/django_1_6_0.py b/lib/python2.7/site-packages/django/core/checks/compatibility/django_1_6_0.py
new file mode 100644
index 0000000..ef182bf
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/checks/compatibility/django_1_6_0.py
@@ -0,0 +1,62 @@
+from __future__ import unicode_literals
+
+from django.db import models
+
def check_test_runner():
    """
    Checks if the user has *not* overridden the ``TEST_RUNNER`` setting &
    warns them about the default behavior changes.

    If the user has overridden that setting, we presume they know what they're
    doing & avoid generating a message.
    """
    from django.conf import settings
    new_default = 'django.test.runner.DiscoverRunner'
    if getattr(settings, 'TEST_RUNNER', new_default) != new_default:
        # Explicitly configured: assume the user knows what they are doing.
        return None
    return ' '.join([
        "Django 1.6 introduced a new default test runner ('%s')" % new_default,
        "You should ensure your tests are all running & behaving as expected. See",
        "https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner",
        "for more information.",
    ])
+
def check_boolean_field_default_value():
    """
    Checks if there are any BooleanFields without a default value, &
    warns the user that the default has changed from False to Null.

    Returns a single warning string, or None when every BooleanField has an
    explicit default.
    """
    fields = []
    for cls in models.get_models():
        opts = cls._meta
        for f in opts.local_fields:
            if isinstance(f, models.BooleanField) and not f.has_default():
                fields.append(
                    '%s.%s: "%s"' % (opts.app_label, opts.object_name, f.name)
                )
    if fields:
        fieldnames = ", ".join(fields)
        message = [
            "You have not set a default value for one or more BooleanFields:",
            "%s." % fieldnames,
            "In Django 1.6 the default value of BooleanField was changed from",
            "False to Null when Field.default isn't defined. See",
            # Bug fix: the original list was missing the comma after the URL,
            # so implicit string-literal concatenation produced
            # "...#booleanfieldfor more information." in the final message.
            "https://docs.djangoproject.com/en/1.6/ref/models/fields/#booleanfield",
            "for more information.",
        ]
        return ' '.join(message)
+
+
def run_checks():
    """
    Required by the ``check`` management command, this returns a list of
    messages from all the relevant check functions for this version of Django.
    """
    results = (
        check_test_runner(),
        check_boolean_field_default_value(),
    )
    # Drop the ``None`` / empty-string results of checks that passed.
    return [output for output in results if output]
diff --git a/lib/python2.7/site-packages/django/core/context_processors.py b/lib/python2.7/site-packages/django/core/context_processors.py
new file mode 100644
index 0000000..ca1ac68
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/context_processors.py
@@ -0,0 +1,75 @@
+"""
+A set of request processors that return dictionaries to be merged into a
+template context. Each function takes the request object as its only parameter
+and returns a dictionary to add to the context.
+
+These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
+RequestContext.
+"""
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.middleware.csrf import get_token
+from django.utils import six
+from django.utils.encoding import smart_text
+from django.utils.functional import lazy
+
+
def csrf(request):
    """
    Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if
    it has not been provided by either a view decorator or the middleware
    """
    def _get_val():
        token = get_token(request)
        if token is None:
            # In order to be able to provide debugging info in the
            # case of misconfiguration, we use a sentinel value
            # instead of returning an empty dict.
            return 'NOTPROVIDED'
        else:
            return smart_text(token)
    # Wrapped in lazy() so get_token() runs only if/when the template
    # actually renders {{ csrf_token }}, not on every request.
    _get_val = lazy(_get_val, six.text_type)

    return {'csrf_token': _get_val() }
+
def debug(request):
    """
    Returns context variables helpful for debugging: only populated when
    DEBUG is on and the client address is listed in INTERNAL_IPS.
    """
    if not (settings.DEBUG and
            request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS):
        return {}
    from django.db import connection
    return {
        'debug': True,
        'sql_queries': connection.queries,
    }
+
def i18n(request):
    """Expose the language settings of the current request to templates."""
    from django.utils import translation

    return {
        'LANGUAGES': settings.LANGUAGES,
        'LANGUAGE_CODE': translation.get_language(),
        'LANGUAGE_BIDI': translation.get_language_bidi(),
    }
+
def tz(request):
    """Adds the active time zone's name to the context as ``TIME_ZONE``."""
    from django.utils import timezone

    return {'TIME_ZONE': timezone.get_current_timezone_name()}
+
def static(request):
    """Expose ``STATIC_URL`` from settings to every template context."""
    return dict(STATIC_URL=settings.STATIC_URL)
+
def media(request):
    """Expose ``MEDIA_URL`` from settings to every template context."""
    return dict(MEDIA_URL=settings.MEDIA_URL)
+
def request(request):
    """Make the request object itself available in the template context."""
    return dict(request=request)
diff --git a/lib/python2.7/site-packages/django/core/exceptions.py b/lib/python2.7/site-packages/django/core/exceptions.py
new file mode 100644
index 0000000..efec228
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/exceptions.py
@@ -0,0 +1,140 @@
+"""
+Global Django exception and warning classes.
+"""
+from functools import reduce
+import operator
+
+from django.utils.encoding import force_text
+
+
class DjangoRuntimeWarning(RuntimeWarning):
    """Base warning category for Django runtime warnings."""
    pass


class ObjectDoesNotExist(Exception):
    """The requested object does not exist"""
    # NOTE(review): presumably consulted by the template engine to silence
    # lookup failures during rendering -- confirm against the template code.
    silent_variable_failure = True


class MultipleObjectsReturned(Exception):
    """The query returned multiple objects when only one was expected."""
    pass


class SuspiciousOperation(Exception):
    """The user did something suspicious"""


# The following subclasses let callers catch specific categories of
# suspicious activity while ``except SuspiciousOperation`` still catches all.
class SuspiciousMultipartForm(SuspiciousOperation):
    """Suspect MIME request in multipart form data"""
    pass


class SuspiciousFileOperation(SuspiciousOperation):
    """A Suspicious filesystem operation was attempted"""
    pass


class DisallowedHost(SuspiciousOperation):
    """HTTP_HOST header contains invalid value"""
    pass


class DisallowedRedirect(SuspiciousOperation):
    """Redirect to scheme not in allowed list"""
    pass


class PermissionDenied(Exception):
    """The user did not have permission to do that"""
    pass


class ViewDoesNotExist(Exception):
    """The requested view does not exist"""
    pass


class MiddlewareNotUsed(Exception):
    """This middleware is not used in this server configuration"""
    pass


class ImproperlyConfigured(Exception):
    """Django is somehow improperly configured"""
    pass


class FieldError(Exception):
    """Some kind of problem with a model field."""
    pass


# Dictionary key used by ValidationError.update_error_dict for errors that
# are not attached to any particular field.
NON_FIELD_ERRORS = '__all__'
+
+
class ValidationError(Exception):
    """An error while validating data."""
    def __init__(self, message, code=None, params=None):
        """
        ValidationError can be passed any object that can be printed (usually
        a string), a list of objects or a dictionary.

        Only the scalar case records ``code``/``params``; a dict becomes
        ``error_dict`` and a list becomes ``error_list`` unchanged.
        """
        if isinstance(message, dict):
            self.error_dict = message
        elif isinstance(message, list):
            self.error_list = message
        else:
            self.code = code
            self.params = params
            self.message = message
            # A scalar error is normalized to a one-element list of itself,
            # so downstream code can treat every error as a list.
            self.error_list = [self]

    @property
    def message_dict(self):
        # Flatten {field: [messages]} -- raises AttributeError when this
        # instance was not constructed from a dict (no error_dict).
        message_dict = {}
        for field, messages in self.error_dict.items():
            message_dict[field] = []
            for message in messages:
                if isinstance(message, ValidationError):
                    message_dict[field].extend(message.messages)
                else:
                    message_dict[field].append(force_text(message))
        return message_dict

    @property
    def messages(self):
        # Collapse either form into a flat list of text messages.
        if hasattr(self, 'error_dict'):
            message_list = reduce(operator.add, self.error_dict.values())
        else:
            message_list = self.error_list

        messages = []
        for message in message_list:
            if isinstance(message, ValidationError):
                params = message.params
                message = message.message
                if params:
                    # %-interpolate the stored params into the message text.
                    message %= params
            message = force_text(message)
            messages.append(message)
        return messages

    def __str__(self):
        if hasattr(self, 'error_dict'):
            return repr(self.message_dict)
        return repr(self.messages)

    def __repr__(self):
        return 'ValidationError(%s)' % self

    def update_error_dict(self, error_dict):
        """
        Merge this error into ``error_dict`` (mutating and returning it);
        non-dict errors are filed under NON_FIELD_ERRORS.
        """
        if hasattr(self, 'error_dict'):
            if error_dict:
                for k, v in self.error_dict.items():
                    error_dict.setdefault(k, []).extend(v)
            else:
                error_dict = self.error_dict
        else:
            error_dict[NON_FIELD_ERRORS] = self.error_list
        return error_dict
diff --git a/lib/python2.7/site-packages/django/core/files/__init__.py b/lib/python2.7/site-packages/django/core/files/__init__.py
new file mode 100644
index 0000000..0c3ef57
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/__init__.py
@@ -0,0 +1 @@
+from django.core.files.base import File
diff --git a/lib/python2.7/site-packages/django/core/files/base.py b/lib/python2.7/site-packages/django/core/files/base.py
new file mode 100644
index 0000000..f07b2b4
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/base.py
@@ -0,0 +1,158 @@
+from __future__ import unicode_literals
+
+import os
+from io import BytesIO, StringIO, UnsupportedOperation
+
+from django.utils.encoding import smart_text
+from django.core.files.utils import FileProxyMixin
+from django.utils import six
+from django.utils.encoding import force_bytes, python_2_unicode_compatible
+
@python_2_unicode_compatible
class File(FileProxyMixin):
    """
    Wrapper around a file-like object adding size, chunked reading and
    context-manager behavior.  Attribute access for read/seek/etc. is
    presumably delegated to ``self.file`` by FileProxyMixin -- confirm.
    """
    # 64 KB default read size for chunks().
    DEFAULT_CHUNK_SIZE = 64 * 2**10

    def __init__(self, file, name=None):
        self.file = file
        if name is None:
            # Fall back to the underlying object's name (e.g. open() files).
            name = getattr(file, 'name', None)
        self.name = name
        if hasattr(file, 'mode'):
            self.mode = file.mode

    def __str__(self):
        return smart_text(self.name or '')

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self or "None")

    def __bool__(self):
        # A File is truthy iff it has a (non-empty) name.
        return bool(self.name)

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def __len__(self):
        return self.size

    def _get_size(self):
        # Size is resolved once and cached in _size; sources are tried in
        # order: the wrapped object, the filesystem, then seek/tell.
        if not hasattr(self, '_size'):
            if hasattr(self.file, 'size'):
                self._size = self.file.size
            elif hasattr(self.file, 'name') and os.path.exists(self.file.name):
                self._size = os.path.getsize(self.file.name)
            elif hasattr(self.file, 'tell') and hasattr(self.file, 'seek'):
                pos = self.file.tell()
                self.file.seek(0, os.SEEK_END)
                self._size = self.file.tell()
                # Restore the caller's position.
                self.file.seek(pos)
            else:
                raise AttributeError("Unable to determine the file's size.")
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_closed(self):
        return not self.file or self.file.closed
    closed = property(_get_closed)

    def chunks(self, chunk_size=None):
        """
        Read the file and yield chunks of ``chunk_size`` bytes (defaults to
        ``File.DEFAULT_CHUNK_SIZE``).
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE

        try:
            # Rewind when possible; unseekable streams are read from their
            # current position.
            self.seek(0)
        except (AttributeError, UnsupportedOperation):
            pass

        while True:
            data = self.read(chunk_size)
            if not data:
                break
            yield data

    def multiple_chunks(self, chunk_size=None):
        """
        Returns ``True`` if you can expect multiple chunks.

        NB: If a particular file representation is in memory, subclasses should
        always return ``False`` -- there's no good reason to read from memory in
        chunks.
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE
        return self.size > chunk_size

    def __iter__(self):
        # Iterate over this file-like object by newlines
        buffer_ = None
        for chunk in self.chunks():
            chunk_buffer = BytesIO(chunk)

            for line in chunk_buffer:
                if buffer_:
                    # Prepend the partial line carried over from the
                    # previous chunk.
                    line = buffer_ + line
                    buffer_ = None

                # If this is the end of a line, yield
                # otherwise, wait for the next round
                if line[-1:] in (b'\n', b'\r'):
                    yield line
                else:
                    buffer_ = line

        if buffer_ is not None:
            # Trailing data with no final newline.
            yield buffer_

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()

    def open(self, mode=None):
        """Rewind an open file, or reopen a closed one by name."""
        if not self.closed:
            self.seek(0)
        elif self.name and os.path.exists(self.name):
            self.file = open(self.name, mode or self.mode)
        else:
            raise ValueError("The file cannot be reopened.")

    def close(self):
        self.file.close()
+
@python_2_unicode_compatible
class ContentFile(File):
    """
    A File-like object that takes just raw content, rather than an actual file.
    """
    def __init__(self, content, name=None):
        if six.PY3:
            # On Python 3, keep text as text (StringIO) and bytes as bytes.
            stream_class = StringIO if isinstance(content, six.text_type) else BytesIO
        else:
            # On Python 2, everything is coerced to bytes.
            stream_class = BytesIO
            content = force_bytes(content)
        super(ContentFile, self).__init__(stream_class(content), name=name)
        self.size = len(content)

    def __str__(self):
        return 'Raw content'

    def __bool__(self):
        # Always truthy, even with empty content (unlike File, which tests
        # its name).
        return True

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def open(self, mode=None):
        # In-memory stream: "reopening" is just a rewind.
        self.seek(0)

    def close(self):
        # Intentionally a no-op so the buffer survives context-manager exit.
        pass
new file mode 100644
index 0000000..e1d6091
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/images.py
@@ -0,0 +1,74 @@
+"""
+Utility functions for handling images.
+
+Requires Pillow (or PIL), as you might imagine.
+"""
+import zlib
+
+from django.core.files import File
+
+
class ImageFile(File):
    """
    A mixin for use alongside django.core.files.base.File, which provides
    additional features for dealing with images.
    """
    def _get_width(self):
        return self._get_image_dimensions()[0]
    width = property(_get_width)

    def _get_height(self):
        return self._get_image_dimensions()[1]
    height = property(_get_height)

    def _get_image_dimensions(self):
        # Dimensions are computed once and memoized in _dimensions_cache.
        if not hasattr(self, '_dimensions_cache'):
            # Remember whether the file was closed on entry so
            # get_image_dimensions can restore that state afterwards.
            close = self.closed
            self.open()
            self._dimensions_cache = get_image_dimensions(self, close=close)
        return self._dimensions_cache
+
+
def get_image_dimensions(file_or_path, close=False):
    """
    Returns the (width, height) of an image, given an open file or a path. Set
    'close' to True to close the file at the end if it is initially in an open
    state.

    Returns None when the parser never produces an image (e.g. not an image).
    """
    from django.utils.image import ImageFile as PILImageFile

    p = PILImageFile.Parser()
    if hasattr(file_or_path, 'read'):
        # NOTE: 'file' shadows the Python 2 builtin; kept as-is.
        file = file_or_path
        file_pos = file.tell()
        file.seek(0)
    else:
        # We opened the file ourselves, so we are responsible for closing it.
        file = open(file_or_path, 'rb')
        close = True
    try:
        # Most of the time PIL only needs a small chunk to parse the image and
        # get the dimensions, but with some TIFF files PIL needs to parse the
        # whole file.
        chunk_size = 1024
        while 1:
            data = file.read(chunk_size)
            if not data:
                break
            try:
                p.feed(data)
            except zlib.error as e:
                # ignore zlib complaining on truncated stream, just feed more
                # data to parser (ticket #19457).
                if e.args[0].startswith("Error -5"):
                    pass
                else:
                    raise
            if p.image:
                return p.image.size
            # Double the read size each round to bound the number of feeds
            # for images whose headers sit deep in the file.
            chunk_size = chunk_size*2
        return None
    finally:
        # Either close the file we opened, or restore the caller's position.
        if close:
            file.close()
        else:
            file.seek(file_pos)
diff --git a/lib/python2.7/site-packages/django/core/files/locks.py b/lib/python2.7/site-packages/django/core/files/locks.py
new file mode 100644
index 0000000..6f0e4b9
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/locks.py
@@ -0,0 +1,69 @@
+"""
+Portable file locking utilities.
+
+Based partially on example by Jonathan Feignberg <jdf@pobox.com> in the Python
+Cookbook, licensed under the Python Software License.
+
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203
+
+Example Usage::
+
+ >>> from django.core.files import locks
+ >>> with open('./file', 'wb') as f:
+ ... locks.lock(f, locks.LOCK_EX)
+ ... f.write('Django')
+"""
+
+__all__ = ('LOCK_EX','LOCK_SH','LOCK_NB','lock','unlock')
+
# Which locking implementation was successfully configured: 'nt', 'posix',
# or None when neither probe below succeeded.
system_type = None

try:
    import win32con
    import win32file
    import pywintypes
    LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
    LOCK_SH = 0
    LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
    __overlapped = pywintypes.OVERLAPPED()
    system_type = 'nt'
except (ImportError, AttributeError):
    pass

# Probed second: if both pywin32 and fcntl were importable, these posix
# values (and system_type) would overwrite the win32 ones above.
try:
    import fcntl
    LOCK_EX = fcntl.LOCK_EX
    LOCK_SH = fcntl.LOCK_SH
    LOCK_NB = fcntl.LOCK_NB
    system_type = 'posix'
except (ImportError, AttributeError):
    pass
+
def fd(f):
    """Return ``f.fileno()`` for file-like objects, else ``f`` itself
    (assumed to already be a file descriptor)."""
    fileno = getattr(f, 'fileno', None)
    return f if fileno is None else fileno()
+
# Define lock()/unlock() to match whichever platform support was detected
# above; the fallback pair silently does nothing.
if system_type == 'nt':
    def lock(file, flags):
        hfile = win32file._get_osfhandle(fd(file))
        # NOTE(review): the 0 / -0x10000 arguments are the byte-range to
        # lock as passed to LockFileEx -- confirm against the Win32 API docs
        # before changing.
        win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped)

    def unlock(file):
        hfile = win32file._get_osfhandle(fd(file))
        win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped)
elif system_type == 'posix':
    def lock(file, flags):
        fcntl.lockf(fd(file), flags)

    def unlock(file):
        fcntl.lockf(fd(file), fcntl.LOCK_UN)
else:
    # File locking is not supported.
    LOCK_EX = LOCK_SH = LOCK_NB = None

    # Dummy functions that don't do anything.
    def lock(file, flags):
        pass

    def unlock(file):
        pass
diff --git a/lib/python2.7/site-packages/django/core/files/move.py b/lib/python2.7/site-packages/django/core/files/move.py
new file mode 100644
index 0000000..4bd739b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/move.py
@@ -0,0 +1,89 @@
+"""
+Move a file in the safest way possible::
+
+ >>> from django.core.files.move import file_move_safe
+ >>> file_move_safe("/tmp/old_file", "/tmp/new_file")
+"""
+
+import os
+from django.core.files import locks
+
# Use shutil.copystat when available; otherwise (very old Pythons) fall back
# to a minimal reimplementation covering mode bits, atime and mtime.
try:
    from shutil import copystat
except ImportError:
    import stat
    def copystat(src, dst):
        """Copy all stat info (mode bits, atime and mtime) from src to dst"""
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        if hasattr(os, 'utime'):
            os.utime(dst, (st.st_atime, st.st_mtime))
        if hasattr(os, 'chmod'):
            os.chmod(dst, mode)
+
+__all__ = ['file_move_safe']
+
+def _samefile(src, dst):
+ # Macintosh, Unix.
+ if hasattr(os.path,'samefile'):
+ try:
+ return os.path.samefile(src, dst)
+ except OSError:
+ return False
+
+ # All other platforms: check for same pathname.
+ return (os.path.normcase(os.path.abspath(src)) ==
+ os.path.normcase(os.path.abspath(dst)))
+
def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
    """
    Moves a file from one location to another in the safest way possible.

    First, tries ``os.rename``, which is simple but will break across filesystems.
    If that fails, streams manually from one file to another in pure Python.

    If the destination file exists and ``allow_overwrite`` is ``False``, this
    function will throw an ``IOError``.
    """

    # There's no reason to move if we don't have to.
    if _samefile(old_file_name, new_file_name):
        return

    try:
        # If the destination file exists and allow_overwrite is False then raise an IOError
        # NOTE(review): on Python 3, IOError IS OSError, so this raise is
        # caught by the handler below and the copy path's O_EXCL open raises
        # instead -- confirm which exception callers expect.
        if not allow_overwrite and os.access(new_file_name, os.F_OK):
            raise IOError("Destination file %s exists and allow_overwrite is False" % new_file_name)

        os.rename(old_file_name, new_file_name)
        return
    except OSError:
        # This will happen with os.rename if moving to another filesystem
        # or when moving opened files on certain operating systems
        pass

    # first open the old file, so that it won't go away
    with open(old_file_name, 'rb') as old_file:
        # now open the new file, not forgetting allow_overwrite
        # O_EXCL (when overwrite is disallowed) makes creation race-safe:
        # the open itself fails if the destination appeared meanwhile.
        fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
                                    (os.O_EXCL if not allow_overwrite else 0))
        try:
            locks.lock(fd, locks.LOCK_EX)
            current_chunk = None
            # Copy in chunk_size pieces until read() returns b''.
            while current_chunk != b'':
                current_chunk = old_file.read(chunk_size)
                os.write(fd, current_chunk)
        finally:
            locks.unlock(fd)
            os.close(fd)
    copystat(old_file_name, new_file_name)

    try:
        os.remove(old_file_name)
    except OSError as e:
        # Certain operating systems (Cygwin and Windows)
        # fail when deleting opened files, ignore it. (For the
        # systems where this happens, temporary files will be auto-deleted
        # on close anyway.)
        if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13:
            raise
diff --git a/lib/python2.7/site-packages/django/core/files/storage.py b/lib/python2.7/site-packages/django/core/files/storage.py
new file mode 100644
index 0000000..8012378
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/storage.py
@@ -0,0 +1,288 @@
+import os
+import errno
+import itertools
+from datetime import datetime
+
+from django.conf import settings
+from django.core.exceptions import SuspiciousFileOperation
+from django.core.files import locks, File
+from django.core.files.move import file_move_safe
+from django.utils.encoding import force_text, filepath_to_uri
+from django.utils.functional import LazyObject
+from django.utils.module_loading import import_by_path
+from django.utils.six.moves.urllib.parse import urljoin
+from django.utils.text import get_valid_filename
+from django.utils._os import safe_join, abspathu
+
+
+__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
+
+class Storage(object):
+    """
+    A base storage class, providing some default behaviors that all other
+    storage systems can inherit or override, as necessary.
+    """
+
+    # The following methods represent a public interface to private methods.
+    # These shouldn't be overridden by subclasses unless absolutely necessary.
+
+    def open(self, name, mode='rb'):
+        """
+        Retrieves the specified file from storage.
+        """
+        return self._open(name, mode)
+
+    def save(self, name, content):
+        """
+        Saves new content to the file specified by name. The content should be
+        a proper File object or any python file-like object, ready to be read
+        from the beginning.
+        """
+        # Get the proper name for the file, as it will actually be saved.
+        if name is None:
+            name = content.name
+
+        # Wrap plain file-like objects so _save() can rely on the File API
+        # (notably .chunks()).
+        if not hasattr(content, 'chunks'):
+            content = File(content)
+
+        name = self.get_available_name(name)
+        name = self._save(name, content)
+
+        # Store filenames with forward slashes, even on Windows
+        return force_text(name.replace('\\', '/'))
+
+    # These methods are part of the public API, with default implementations.
+
+    def get_valid_name(self, name):
+        """
+        Returns a filename, based on the provided filename, that's suitable for
+        use in the target storage system.
+        """
+        return get_valid_filename(name)
+
+    def get_available_name(self, name):
+        """
+        Returns a filename that's free on the target storage system, and
+        available for new content to be written to.
+        """
+        dir_name, file_name = os.path.split(name)
+        file_root, file_ext = os.path.splitext(file_name)
+        # If the filename already exists, add an underscore and a number (before
+        # the file extension, if one exists) to the filename until the generated
+        # filename doesn't exist.
+        count = itertools.count(1)
+        while self.exists(name):
+            # file_ext includes the dot.
+            name = os.path.join(dir_name, "%s_%s%s" % (file_root, next(count), file_ext))
+
+        return name
+
+    def path(self, name):
+        """
+        Returns a local filesystem path where the file can be retrieved using
+        Python's built-in open() function. Storage systems that can't be
+        accessed using open() should *not* implement this method.
+        """
+        raise NotImplementedError("This backend doesn't support absolute paths.")
+
+    # The following methods form the public API for storage systems, but with
+    # no default implementations. Subclasses must implement *all* of these.
+
+    def delete(self, name):
+        """
+        Deletes the specified file from the storage system.
+        """
+        raise NotImplementedError()
+
+    def exists(self, name):
+        """
+        Returns True if a file referenced by the given name already exists in the
+        storage system, or False if the name is available for a new file.
+        """
+        raise NotImplementedError()
+
+    def listdir(self, path):
+        """
+        Lists the contents of the specified path, returning a 2-tuple of lists;
+        the first item being directories, the second item being files.
+        """
+        raise NotImplementedError()
+
+    def size(self, name):
+        """
+        Returns the total size, in bytes, of the file specified by name.
+        """
+        raise NotImplementedError()
+
+    def url(self, name):
+        """
+        Returns an absolute URL where the file's contents can be accessed
+        directly by a Web browser.
+        """
+        raise NotImplementedError()
+
+    def accessed_time(self, name):
+        """
+        Returns the last accessed time (as datetime object) of the file
+        specified by name.
+        """
+        raise NotImplementedError()
+
+    def created_time(self, name):
+        """
+        Returns the creation time (as datetime object) of the file
+        specified by name.
+        """
+        raise NotImplementedError()
+
+    def modified_time(self, name):
+        """
+        Returns the last modified time (as datetime object) of the file
+        specified by name.
+        """
+        raise NotImplementedError()
+
+class FileSystemStorage(Storage):
+    """
+    Standard filesystem storage
+    """
+
+    def __init__(self, location=None, base_url=None):
+        # Defaults come from settings so a plain FileSystemStorage() serves
+        # MEDIA_ROOT at MEDIA_URL.
+        if location is None:
+            location = settings.MEDIA_ROOT
+        self.base_location = location
+        # Normalize to an absolute path once; path() joins against this.
+        self.location = abspathu(self.base_location)
+        if base_url is None:
+            base_url = settings.MEDIA_URL
+        self.base_url = base_url
+
+    def _open(self, name, mode='rb'):
+        """Open the named file directly off the local filesystem."""
+        return File(open(self.path(name), mode))
+
+    def _save(self, name, content):
+        """Write ``content`` to disk under ``name``, returning the name used."""
+        full_path = self.path(name)
+
+        # Create any intermediate directories that do not exist.
+        # Note that there is a race between os.path.exists and os.makedirs:
+        # if os.makedirs fails with EEXIST, the directory was created
+        # concurrently, and we can continue normally. Refs #16082.
+        directory = os.path.dirname(full_path)
+        if not os.path.exists(directory):
+            try:
+                os.makedirs(directory)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+        if not os.path.isdir(directory):
+            raise IOError("%s exists and is not a directory." % directory)
+
+        # There's a potential race condition between get_available_name and
+        # saving the file; it's possible that two threads might return the
+        # same name, at which point all sorts of fun happens. So we need to
+        # try to create the file, but if it already exists we have to go back
+        # to get_available_name() and try again.
+
+        while True:
+            try:
+                # This file has a file path that we can move.
+                if hasattr(content, 'temporary_file_path'):
+                    file_move_safe(content.temporary_file_path(), full_path)
+                    content.close()
+
+                # This is a normal uploadedfile that we can stream.
+                else:
+                    # This fun binary flag incantation makes os.open throw an
+                    # OSError if the file already exists before we open it.
+                    flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
+                             getattr(os, 'O_BINARY', 0))
+                    # The current umask value is masked out by os.open!
+                    fd = os.open(full_path, flags, 0o666)
+                    _file = None
+                    try:
+                        locks.lock(fd, locks.LOCK_EX)
+                        for chunk in content.chunks():
+                            if _file is None:
+                                # Pick text vs. binary mode from the first chunk.
+                                mode = 'wb' if isinstance(chunk, bytes) else 'wt'
+                                _file = os.fdopen(fd, mode)
+                            _file.write(chunk)
+                    finally:
+                        locks.unlock(fd)
+                        if _file is not None:
+                            _file.close()
+                        else:
+                            os.close(fd)
+            except OSError as e:
+                if e.errno == errno.EEXIST:
+                    # Ooops, the file exists. We need a new file name.
+                    name = self.get_available_name(name)
+                    full_path = self.path(name)
+                else:
+                    raise
+            else:
+                # OK, the file save worked. Break out of the loop.
+                break
+
+        if settings.FILE_UPLOAD_PERMISSIONS is not None:
+            os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
+
+        return name
+
+    def delete(self, name):
+        """Remove the named file from the filesystem if it exists."""
+        assert name, "The name argument is not allowed to be empty."
+        name = self.path(name)
+        # If the file exists, delete it from the filesystem.
+        # Note that there is a race between os.path.exists and os.remove:
+        # if os.remove fails with ENOENT, the file was removed
+        # concurrently, and we can continue normally.
+        if os.path.exists(name):
+            try:
+                os.remove(name)
+            except OSError as e:
+                if e.errno != errno.ENOENT:
+                    raise
+
+    def exists(self, name):
+        """Return True if the named file exists on disk."""
+        return os.path.exists(self.path(name))
+
+    def listdir(self, path):
+        """Return ([directories], [files]) for the given storage path."""
+        path = self.path(path)
+        directories, files = [], []
+        for entry in os.listdir(path):
+            if os.path.isdir(os.path.join(path, entry)):
+                directories.append(entry)
+            else:
+                files.append(entry)
+        return directories, files
+
+    def path(self, name):
+        """Return the absolute filesystem path for ``name``, refusing escapes
+        outside of ``self.location``."""
+        try:
+            path = safe_join(self.location, name)
+        except ValueError:
+            raise SuspiciousFileOperation("Attempted access to '%s' denied." % name)
+        return os.path.normpath(path)
+
+    def size(self, name):
+        """Return the file size in bytes."""
+        return os.path.getsize(self.path(name))
+
+    def url(self, name):
+        """Return the public URL for ``name`` under ``base_url``."""
+        if self.base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urljoin(self.base_url, filepath_to_uri(name))
+
+    def accessed_time(self, name):
+        """Return the last-access time as a naive local datetime."""
+        return datetime.fromtimestamp(os.path.getatime(self.path(name)))
+
+    def created_time(self, name):
+        """Return the creation (ctime) time as a naive local datetime."""
+        return datetime.fromtimestamp(os.path.getctime(self.path(name)))
+
+    def modified_time(self, name):
+        """Return the last-modification time as a naive local datetime."""
+        return datetime.fromtimestamp(os.path.getmtime(self.path(name)))
+
+def get_storage_class(import_path=None):
+    """Import and return the storage class at ``import_path`` (defaults to
+    settings.DEFAULT_FILE_STORAGE)."""
+    return import_by_path(import_path or settings.DEFAULT_FILE_STORAGE)
+
+class DefaultStorage(LazyObject):
+    # Lazily instantiate the configured default storage backend on first use,
+    # so importing this module does not require settings to be configured.
+    def _setup(self):
+        self._wrapped = get_storage_class()()
+
+default_storage = DefaultStorage()
diff --git a/lib/python2.7/site-packages/django/core/files/temp.py b/lib/python2.7/site-packages/django/core/files/temp.py
new file mode 100644
index 0000000..3dcda17
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/temp.py
@@ -0,0 +1,65 @@
+"""
+The temp module provides a NamedTemporaryFile that can be re-opened on any
+platform. Most platforms use the standard Python tempfile.TemporaryFile class,
+but MS Windows users are given a custom class.
+
+This is needed because in Windows NT, the default implementation of
+NamedTemporaryFile uses the O_TEMPORARY flag, and thus cannot be reopened [1].
+
+1: http://mail.python.org/pipermail/python-list/2005-December/359474.html
+"""
+
+import os
+import tempfile
+from django.core.files.utils import FileProxyMixin
+
+__all__ = ('NamedTemporaryFile', 'gettempdir',)
+
+if os.name == 'nt':
+    class TemporaryFile(FileProxyMixin):
+        """
+        Temporary file object constructor that works in Windows and supports
+        reopening of the temporary file in windows.
+        """
+        def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='',
+                     dir=None):
+            # mkstemp (unlike NamedTemporaryFile) does not set O_TEMPORARY,
+            # so the file can be reopened by name on Windows.
+            fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
+                                          dir=dir)
+            self.name = name
+            self.file = os.fdopen(fd, mode, bufsize)
+            self.close_called = False
+
+        # Because close can be called during shutdown
+        # we need to cache os.unlink and access it
+        # as self.unlink only
+        unlink = os.unlink
+
+        def close(self):
+            # Best-effort close-and-delete; errors are ignored because the
+            # file may already be gone or still open elsewhere.
+            if not self.close_called:
+                self.close_called = True
+                try:
+                    self.file.close()
+                except (OSError, IOError):
+                    pass
+                try:
+                    self.unlink(self.name)
+                except (OSError):
+                    pass
+
+        @property
+        def closed(self):
+            """
+            This attribute needs to be accessible in certain situations,
+            because this class is supposed to mock the API of the class
+            tempfile.NamedTemporaryFile in the Python standard library.
+            """
+            return self.file.closed
+
+        def __del__(self):
+            self.close()
+
+    NamedTemporaryFile = TemporaryFile
+else:
+    NamedTemporaryFile = tempfile.NamedTemporaryFile
+
+gettempdir = tempfile.gettempdir
diff --git a/lib/python2.7/site-packages/django/core/files/uploadedfile.py b/lib/python2.7/site-packages/django/core/files/uploadedfile.py
new file mode 100644
index 0000000..39b99ff
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/uploadedfile.py
@@ -0,0 +1,125 @@
+"""
+Classes representing uploaded files.
+"""
+
+import os
+from io import BytesIO
+
+from django.conf import settings
+from django.core.files.base import File
+from django.core.files import temp as tempfile
+from django.utils.encoding import force_str
+
+__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
+ 'SimpleUploadedFile')
+
+class UploadedFile(File):
+    """
+    An abstract uploaded file (``TemporaryUploadedFile`` and
+    ``InMemoryUploadedFile`` are the built-in concrete subclasses).
+
+    An ``UploadedFile`` object behaves somewhat like a file object and
+    represents some file data that the user submitted with a form.
+    """
+    # Default chunk size used when streaming the file: 64 KB.
+    DEFAULT_CHUNK_SIZE = 64 * 2**10
+
+    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None):
+        super(UploadedFile, self).__init__(file, name)
+        self.size = size
+        self.content_type = content_type
+        self.charset = charset
+
+    def __repr__(self):
+        return force_str("<%s: %s (%s)>" % (
+            self.__class__.__name__, self.name, self.content_type))
+
+    def _get_name(self):
+        return self._name
+
+    def _set_name(self, name):
+        # Sanitize the file name so that it can't be dangerous.
+        if name is not None:
+            # Just use the basename of the file -- anything else is dangerous.
+            name = os.path.basename(name)
+
+            # File names longer than 255 characters can cause problems on older OSes.
+            if len(name) > 255:
+                # Truncate the stem but keep the extension intact.
+                name, ext = os.path.splitext(name)
+                name = name[:255 - len(ext)] + ext
+
+        self._name = name
+
+    name = property(_get_name, _set_name)
+
+class TemporaryUploadedFile(UploadedFile):
+    """
+    A file uploaded to a temporary location (i.e. stream-to-disk).
+    """
+    def __init__(self, name, content_type, size, charset):
+        # Honor FILE_UPLOAD_TEMP_DIR when set; otherwise use the platform
+        # default temporary directory.
+        if settings.FILE_UPLOAD_TEMP_DIR:
+            file = tempfile.NamedTemporaryFile(suffix='.upload',
+                                               dir=settings.FILE_UPLOAD_TEMP_DIR)
+        else:
+            file = tempfile.NamedTemporaryFile(suffix='.upload')
+        super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset)
+
+    def temporary_file_path(self):
+        """
+        Returns the full path of this file.
+        """
+        return self.file.name
+
+    def close(self):
+        try:
+            return self.file.close()
+        except OSError as e:
+            if e.errno != 2:
+                # Means the file was moved or deleted before the tempfile
+                # could unlink it. Still sets self.file.close_called and
+                # calls self.file.file.close() before the exception
+                raise
+
+class InMemoryUploadedFile(UploadedFile):
+    """
+    A file uploaded into memory (i.e. stream-to-memory).
+    """
+    def __init__(self, file, field_name, name, content_type, size, charset):
+        super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset)
+        self.field_name = field_name
+
+    def open(self, mode=None):
+        # Reopening an in-memory file just means rewinding it.
+        self.file.seek(0)
+
+    def close(self):
+        # Nothing to release for an in-memory buffer.
+        pass
+
+    def chunks(self, chunk_size=None):
+        # The whole file fits in memory, so yield it as a single chunk.
+        self.file.seek(0)
+        yield self.read()
+
+    def multiple_chunks(self, chunk_size=None):
+        # Since it's in memory, we'll never have multiple chunks.
+        return False
+
+
+class SimpleUploadedFile(InMemoryUploadedFile):
+    """
+    A simple representation of a file, which just has content, size, and a name.
+    """
+    def __init__(self, name, content, content_type='text/plain'):
+        # Treat None content as an empty bytestring.
+        content = content or b''
+        super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name,
+                                                 content_type, len(content), None)
+
+    def from_dict(cls, file_dict):
+        """
+        Creates a SimpleUploadedFile object from
+        a dictionary object with the following keys:
+           - filename
+           - content-type
+           - content
+        """
+        return cls(file_dict['filename'],
+                   file_dict['content'],
+                   file_dict.get('content-type', 'text/plain'))
+    from_dict = classmethod(from_dict)
diff --git a/lib/python2.7/site-packages/django/core/files/uploadhandler.py b/lib/python2.7/site-packages/django/core/files/uploadhandler.py
new file mode 100644
index 0000000..f5e95cf
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/uploadhandler.py
@@ -0,0 +1,203 @@
+"""
+Base file upload handler classes, and the built-in concrete subclasses
+"""
+
+from __future__ import unicode_literals
+
+from io import BytesIO
+
+from django.conf import settings
+from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile
+from django.utils.encoding import python_2_unicode_compatible
+from django.utils.module_loading import import_by_path
+
+__all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler',
+ 'TemporaryFileUploadHandler', 'MemoryFileUploadHandler',
+ 'load_handler', 'StopFutureHandlers']
+
+class UploadFileException(Exception):
+    """
+    Any error having to do with uploading files.
+    """
+    pass
+
+@python_2_unicode_compatible
+class StopUpload(UploadFileException):
+    """
+    This exception is raised when an upload must abort.
+    """
+    def __init__(self, connection_reset=False):
+        """
+        If ``connection_reset`` is ``True``, Django will halt the upload
+        without consuming the rest of the upload. This will cause the browser to
+        show a "connection reset" error.
+        """
+        self.connection_reset = connection_reset
+
+    def __str__(self):
+        if self.connection_reset:
+            return 'StopUpload: Halt current upload.'
+        else:
+            return 'StopUpload: Consume request data, then halt.'
+
+class SkipFile(UploadFileException):
+    """
+    This exception is raised by an upload handler that wants to skip a given file.
+    """
+    pass
+
+class StopFutureHandlers(UploadFileException):
+    """
+    Upload handlers that have handled a file and do not want future handlers to
+    run should raise this exception instead of returning None.
+    """
+    pass
+
+class FileUploadHandler(object):
+    """
+    Base class for streaming upload handlers.
+    """
+    chunk_size = 64 * 2 ** 10 #: The default chunk size is 64 KB.
+
+    def __init__(self, request=None):
+        # Per-file state; populated by new_file() as the parser encounters
+        # each uploaded file.
+        self.file_name = None
+        self.content_type = None
+        self.content_length = None
+        self.charset = None
+        self.request = request
+
+    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+        """
+        Handle the raw input from the client.
+
+        Parameters:
+
+            :input_data:
+                An object that supports reading via .read().
+            :META:
+                ``request.META``.
+            :content_length:
+                The (integer) value of the Content-Length header from the
+                client.
+            :boundary: The boundary from the Content-Type header. Be sure to
+                prepend two '--'.
+        """
+        pass
+
+    def new_file(self, field_name, file_name, content_type, content_length, charset=None):
+        """
+        Signal that a new file has been started.
+
+        Warning: As with any data from the client, you should not trust
+        content_length (and sometimes won't even get it).
+        """
+        self.field_name = field_name
+        self.file_name = file_name
+        self.content_type = content_type
+        self.content_length = content_length
+        self.charset = charset
+
+    def receive_data_chunk(self, raw_data, start):
+        """
+        Receive data from the streamed upload parser. ``start`` is the position
+        in the file of the chunk.
+        """
+        raise NotImplementedError()
+
+    def file_complete(self, file_size):
+        """
+        Signal that a file has completed. File size corresponds to the actual
+        size accumulated by all the chunks.
+
+        Subclasses should return a valid ``UploadedFile`` object.
+        """
+        raise NotImplementedError()
+
+    def upload_complete(self):
+        """
+        Signal that the upload is complete. Subclasses should perform cleanup
+        that is necessary for this handler.
+        """
+        pass
+
+class TemporaryFileUploadHandler(FileUploadHandler):
+    """
+    Upload handler that streams data into a temporary file.
+    """
+    def __init__(self, *args, **kwargs):
+        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
+
+    def new_file(self, file_name, *args, **kwargs):
+        """
+        Create the file object to append to as data is coming in.
+        """
+        super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
+        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
+
+    def receive_data_chunk(self, raw_data, start):
+        # Append each chunk to the on-disk temporary file.
+        self.file.write(raw_data)
+
+    def file_complete(self, file_size):
+        # Rewind and record the final size before handing the file back.
+        self.file.seek(0)
+        self.file.size = file_size
+        return self.file
+
+class MemoryFileUploadHandler(FileUploadHandler):
+    """
+    File upload handler to stream uploads into memory (used for small files).
+    """
+
+    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+        """
+        Use the content_length to signal whether or not this handler should be in use.
+        """
+        # Check the content-length header to see if we should
+        # If the post is too large, we cannot use the Memory handler.
+        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
+            self.activated = False
+        else:
+            self.activated = True
+
+    def new_file(self, *args, **kwargs):
+        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
+        if self.activated:
+            self.file = BytesIO()
+            # This handler owns the file now; stop later handlers from running.
+            raise StopFutureHandlers()
+
+    def receive_data_chunk(self, raw_data, start):
+        """
+        Add the data to the BytesIO file.
+        """
+        if self.activated:
+            self.file.write(raw_data)
+        else:
+            # Not active: pass the chunk along to the next handler.
+            return raw_data
+
+    def file_complete(self, file_size):
+        """
+        Return a file object if we're activated.
+        """
+        if not self.activated:
+            return
+
+        self.file.seek(0)
+        return InMemoryUploadedFile(
+            file = self.file,
+            field_name = self.field_name,
+            name = self.file_name,
+            content_type = self.content_type,
+            size = file_size,
+            charset = self.charset
+        )
+
+
+def load_handler(path, *args, **kwargs):
+    """
+    Given a path to a handler, return an instance of that handler.
+
+    E.g.::
+        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
+        <TemporaryFileUploadHandler object at 0x...>
+
+    """
+    # Import the class, then instantiate it with the given arguments.
+    return import_by_path(path)(*args, **kwargs)
diff --git a/lib/python2.7/site-packages/django/core/files/utils.py b/lib/python2.7/site-packages/django/core/files/utils.py
new file mode 100644
index 0000000..8cc212f
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/files/utils.py
@@ -0,0 +1,29 @@
+class FileProxyMixin(object):
+    """
+    A mixin class used to forward file methods to an underlying file
+    object. The internal file object has to be called "file"::
+
+        class FileProxy(FileProxyMixin):
+            def __init__(self, file):
+                self.file = file
+    """
+
+    # Each property forwards the attribute lookup to self.file at access
+    # time, so a rebound self.file is picked up automatically.
+    encoding = property(lambda self: self.file.encoding)
+    fileno = property(lambda self: self.file.fileno)
+    flush = property(lambda self: self.file.flush)
+    isatty = property(lambda self: self.file.isatty)
+    newlines = property(lambda self: self.file.newlines)
+    read = property(lambda self: self.file.read)
+    readinto = property(lambda self: self.file.readinto)
+    readline = property(lambda self: self.file.readline)
+    readlines = property(lambda self: self.file.readlines)
+    seek = property(lambda self: self.file.seek)
+    softspace = property(lambda self: self.file.softspace)
+    tell = property(lambda self: self.file.tell)
+    truncate = property(lambda self: self.file.truncate)
+    write = property(lambda self: self.file.write)
+    writelines = property(lambda self: self.file.writelines)
+    xreadlines = property(lambda self: self.file.xreadlines)
+
+    def __iter__(self):
+        return iter(self.file)
diff --git a/lib/python2.7/site-packages/django/core/handlers/__init__.py b/lib/python2.7/site-packages/django/core/handlers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/handlers/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/handlers/base.py b/lib/python2.7/site-packages/django/core/handlers/base.py
new file mode 100644
index 0000000..5911865
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/handlers/base.py
@@ -0,0 +1,290 @@
+from __future__ import unicode_literals
+
+import logging
+import sys
+import types
+
+from django import http
+from django.conf import settings
+from django.core import urlresolvers
+from django.core import signals
+from django.core.exceptions import MiddlewareNotUsed, PermissionDenied, SuspiciousOperation
+from django.db import connections, transaction
+from django.utils.encoding import force_text
+from django.utils.module_loading import import_by_path
+from django.utils import six
+from django.views import debug
+
+logger = logging.getLogger('django.request')
+
+
+class BaseHandler(object):
+    # Changes that are always applied to a response (in this order).
+    response_fixes = [
+        http.fix_location_header,
+        http.conditional_content_removal,
+        http.fix_IE_for_attach,
+        http.fix_IE_for_vary,
+    ]
+
+    def __init__(self):
+        # All middleware lists start as None; load_middleware() fills them in.
+        self._request_middleware = self._view_middleware = self._template_response_middleware = self._response_middleware = self._exception_middleware = None
+
+
+    def load_middleware(self):
+        """
+        Populate middleware lists from settings.MIDDLEWARE_CLASSES.
+
+        Must be called after the environment is fixed (see __call__ in subclasses).
+        """
+        self._view_middleware = []
+        self._template_response_middleware = []
+        self._response_middleware = []
+        self._exception_middleware = []
+
+        request_middleware = []
+        for middleware_path in settings.MIDDLEWARE_CLASSES:
+            mw_class = import_by_path(middleware_path)
+            try:
+                mw_instance = mw_class()
+            except MiddlewareNotUsed:
+                # A middleware may opt out of being used at startup.
+                continue
+
+            # Request/view hooks run in settings order; response-phase hooks
+            # are inserted at the front so they run in reverse order.
+            if hasattr(mw_instance, 'process_request'):
+                request_middleware.append(mw_instance.process_request)
+            if hasattr(mw_instance, 'process_view'):
+                self._view_middleware.append(mw_instance.process_view)
+            if hasattr(mw_instance, 'process_template_response'):
+                self._template_response_middleware.insert(0, mw_instance.process_template_response)
+            if hasattr(mw_instance, 'process_response'):
+                self._response_middleware.insert(0, mw_instance.process_response)
+            if hasattr(mw_instance, 'process_exception'):
+                self._exception_middleware.insert(0, mw_instance.process_exception)
+
+        # We only assign to this when initialization is complete as it is used
+        # as a flag for initialization being complete.
+        self._request_middleware = request_middleware
+
+    def make_view_atomic(self, view):
+        # Wrap the view in transaction.atomic() for every connection that has
+        # ATOMIC_REQUESTS enabled, unless the view opted out.
+        non_atomic_requests = getattr(view, '_non_atomic_requests', set())
+        for db in connections.all():
+            if (db.settings_dict['ATOMIC_REQUESTS']
+                    and db.alias not in non_atomic_requests):
+                view = transaction.atomic(using=db.alias)(view)
+        return view
+
+    def get_response(self, request):
+        "Returns an HttpResponse object for the given HttpRequest"
+
+        # Setup default url resolver for this thread, this code is outside
+        # the try/except so we don't get a spurious "unbound local
+        # variable" exception in the event an exception is raised before
+        # resolver is set
+        urlconf = settings.ROOT_URLCONF
+        urlresolvers.set_urlconf(urlconf)
+        resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
+        try:
+            response = None
+            # Apply request middleware
+            for middleware_method in self._request_middleware:
+                response = middleware_method(request)
+                if response:
+                    break
+
+            if response is None:
+                if hasattr(request, 'urlconf'):
+                    # Reset url resolver with a custom urlconf.
+                    urlconf = request.urlconf
+                    urlresolvers.set_urlconf(urlconf)
+                    resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
+
+                resolver_match = resolver.resolve(request.path_info)
+                callback, callback_args, callback_kwargs = resolver_match
+                request.resolver_match = resolver_match
+
+                # Apply view middleware
+                for middleware_method in self._view_middleware:
+                    response = middleware_method(request, callback, callback_args, callback_kwargs)
+                    if response:
+                        break
+
+            if response is None:
+                wrapped_callback = self.make_view_atomic(callback)
+                try:
+                    response = wrapped_callback(request, *callback_args, **callback_kwargs)
+                except Exception as e:
+                    # If the view raised an exception, run it through exception
+                    # middleware, and if the exception middleware returns a
+                    # response, use that. Otherwise, reraise the exception.
+                    for middleware_method in self._exception_middleware:
+                        response = middleware_method(request, e)
+                        if response:
+                            break
+                    if response is None:
+                        raise
+
+            # Complain if the view returned None (a common error).
+            if response is None:
+                if isinstance(callback, types.FunctionType):    # FBV
+                    view_name = callback.__name__
+                else:                                           # CBV
+                    view_name = callback.__class__.__name__ + '.__call__'
+                raise ValueError("The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name))
+
+            # If the response supports deferred rendering, apply template
+            # response middleware and then render the response
+            if hasattr(response, 'render') and callable(response.render):
+                for middleware_method in self._template_response_middleware:
+                    response = middleware_method(request, response)
+                response = response.render()
+
+        except http.Http404 as e:
+            logger.warning('Not Found: %s', request.path,
+                        extra={
+                            'status_code': 404,
+                            'request': request
+                        })
+            if settings.DEBUG:
+                response = debug.technical_404_response(request, e)
+            else:
+                try:
+                    callback, param_dict = resolver.resolve404()
+                    response = callback(request, **param_dict)
+                except:
+                    # NOTE: bare except is deliberate -- any failure while
+                    # producing the 404 page falls back to the 500 handler.
+                    signals.got_request_exception.send(sender=self.__class__, request=request)
+                    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
+
+        except PermissionDenied:
+            logger.warning(
+                'Forbidden (Permission denied): %s', request.path,
+                extra={
+                    'status_code': 403,
+                    'request': request
+                })
+            try:
+                callback, param_dict = resolver.resolve403()
+                response = callback(request, **param_dict)
+            except:
+                # Fall back to the 500 handler if the 403 handler itself fails.
+                signals.got_request_exception.send(
+                        sender=self.__class__, request=request)
+                response = self.handle_uncaught_exception(request,
+                        resolver, sys.exc_info())
+
+        except SuspiciousOperation as e:
+            # The request logger receives events for any problematic request
+            # The security logger receives events for all SuspiciousOperations
+            security_logger = logging.getLogger('django.security.%s' %
+                            e.__class__.__name__)
+            security_logger.error(force_text(e))
+
+            try:
+                callback, param_dict = resolver.resolve400()
+                response = callback(request, **param_dict)
+            except:
+                # Fall back to the 500 handler if the 400 handler itself fails.
+                signals.got_request_exception.send(
+                        sender=self.__class__, request=request)
+                response = self.handle_uncaught_exception(request,
+                        resolver, sys.exc_info())
+
+        except SystemExit:
+            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
+            raise
+
+        except: # Handle everything else.
+            # Get the exception info now, in case another exception is thrown later.
+            signals.got_request_exception.send(sender=self.__class__, request=request)
+            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
+
+        try:
+            # Apply response middleware, regardless of the response
+            for middleware_method in self._response_middleware:
+                response = middleware_method(request, response)
+            response = self.apply_response_fixes(request, response)
+        except: # Any exception should be gathered and handled
+            signals.got_request_exception.send(sender=self.__class__, request=request)
+            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
+
+        return response
+
+    def handle_uncaught_exception(self, request, resolver, exc_info):
+        """
+        Processing for any otherwise uncaught exceptions (those that will
+        generate HTTP 500 responses). Can be overridden by subclasses who want
+        customised 500 handling.
+
+        Be *very* careful when overriding this because the error could be
+        caused by anything, so assuming something like the database is always
+        available would be an error.
+        """
+        if settings.DEBUG_PROPAGATE_EXCEPTIONS:
+            # Bare raise re-raises the exception currently being handled;
+            # this method is only called from an except block.
+            raise
+
+        logger.error('Internal Server Error: %s', request.path,
+            exc_info=exc_info,
+            extra={
+                'status_code': 500,
+                'request': request
+            }
+        )
+
+        if settings.DEBUG:
+            return debug.technical_500_response(request, *exc_info)
+
+        # If Http500 handler is not installed, re-raise last exception
+        if resolver.urlconf_module is None:
+            six.reraise(*exc_info)
+        # Return an HttpResponse that displays a friendly error message.
+        callback, param_dict = resolver.resolve500()
+        return callback(request, **param_dict)
+
+    def apply_response_fixes(self, request, response):
+        """
+        Applies each of the functions in self.response_fixes to the request and
+        response, modifying the response in the process. Returns the new
+        response.
+        """
+        for func in self.response_fixes:
+            response = func(request, response)
+        return response
+
+
+def get_path_info(environ):
+    """
+    Returns the HTTP request's PATH_INFO as a unicode string.
+    """
+    path_info = environ.get('PATH_INFO', str('/'))
+    # Under Python 3, strings in environ are decoded with ISO-8859-1;
+    # re-encode to recover the original bytestring provided by the web server.
+    if six.PY3:
+        path_info = path_info.encode('iso-8859-1')
+    # It'd be better to implement URI-to-IRI decoding, see #19508.
+    return path_info.decode('utf-8')
+
+
+def get_script_name(environ):
+    """
+    Returns the equivalent of the HTTP request's SCRIPT_NAME environment
+    variable. If Apache mod_rewrite has been used, returns what would have been
+    the script name prior to any rewriting (so it's the script name as seen
+    from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
+    set (to anything).
+    """
+    if settings.FORCE_SCRIPT_NAME is not None:
+        return force_text(settings.FORCE_SCRIPT_NAME)
+
+    # If Apache's mod_rewrite had a whack at the URL, Apache set either
+    # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
+    # rewrites. Unfortunately not every Web server (lighttpd!) passes this
+    # information through all the time, so FORCE_SCRIPT_NAME, above, is still
+    # needed.
+    script_url = environ.get('SCRIPT_URL', environ.get('REDIRECT_URL', str('')))
+    if script_url:
+        # Strip PATH_INFO off the end of the pre-rewrite URL to recover the
+        # script name.
+        script_name = script_url[:-len(environ.get('PATH_INFO', str('')))]
+    else:
+        script_name = environ.get('SCRIPT_NAME', str(''))
+    # Under Python 3, strings in environ are decoded with ISO-8859-1;
+    # re-encode to recover the original bytestring provided by the web server.
+    if six.PY3:
+        script_name = script_name.encode('iso-8859-1')
+    # It'd be better to implement URI-to-IRI decoding, see #19508.
+    return script_name.decode('utf-8')
diff --git a/lib/python2.7/site-packages/django/core/handlers/wsgi.py b/lib/python2.7/site-packages/django/core/handlers/wsgi.py
new file mode 100644
index 0000000..adc8804
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/handlers/wsgi.py
@@ -0,0 +1,215 @@
+from __future__ import unicode_literals
+
+import codecs
+import logging
+import sys
+from io import BytesIO
+from threading import Lock
+
+from django import http
+from django.core import signals
+from django.core.handlers import base
+from django.core.urlresolvers import set_script_prefix
+from django.utils import datastructures
+from django.utils.encoding import force_str, force_text, iri_to_uri
+from django.utils import six
+
+# For backwards compatibility -- lots of code uses this in the wild!
+from django.http.response import REASON_PHRASES as STATUS_CODE_TEXT
+
+logger = logging.getLogger('django.request')
+
+
class LimitedStream(object):
    """
    Wrap a stream so that no more than ``limit`` bytes can ever be read
    from it, no matter how callers ask for data.
    """
    def __init__(self, stream, limit, buf_size=64 * 1024 * 1024):
        self.stream = stream
        self.remaining = limit
        self.buffer = b''
        self.buf_size = buf_size

    def _read_limited(self, size=None):
        # Clamp the request to whatever budget is left.
        if size is None or size > self.remaining:
            size = self.remaining
        if size == 0:
            return b''
        data = self.stream.read(size)
        self.remaining -= len(data)
        return data

    def read(self, size=None):
        """Read up to ``size`` bytes (all remaining bytes when omitted)."""
        if size is None:
            # Drain the look-ahead buffer plus everything still allowed.
            data, self.buffer = self.buffer + self._read_limited(), b''
        elif size < len(self.buffer):
            # Serve the request entirely from the buffered look-ahead.
            data, self.buffer = self.buffer[:size], self.buffer[size:]
        else:
            # Buffer too short: top it up from the underlying stream.
            data = self.buffer + self._read_limited(size - len(self.buffer))
            self.buffer = b''
        return data

    def readline(self, size=None):
        """Read one line (up to ``size`` bytes when given)."""
        # Accumulate chunks until a newline appears in the buffer, the
        # optional size cap is reached, or the stream budget runs out.
        while b'\n' not in self.buffer and \
              (size is None or len(self.buffer) < size):
            if size:
                # size is not None here, so len(self.buffer) < size
                chunk = self._read_limited(size - len(self.buffer))
            else:
                chunk = self._read_limited()
            if not chunk:
                break
            self.buffer += chunk
        # Split exactly one line off the front of the buffer.
        sio = BytesIO(self.buffer)
        line = sio.readline(size) if size else sio.readline()
        self.buffer = sio.read()
        return line
+
+
class WSGIRequest(http.HttpRequest):
    """
    HttpRequest subclass populated from a WSGI ``environ`` dict.

    GET, POST, COOKIES, FILES and REQUEST are lazy: each is parsed on
    first access and then cached on the instance (``_get``, ``_post``,
    etc.), so unused parts of the request are never decoded.
    """
    def __init__(self, environ):
        script_name = base.get_script_name(environ)
        path_info = base.get_path_info(environ)
        if not path_info:
            # Sometimes PATH_INFO exists, but is empty (e.g. accessing
            # the SCRIPT_NAME URL without a trailing slash). We really need to
            # operate as if they'd requested '/'. Not amazingly nice to force
            # the path like this, but should be harmless.
            path_info = '/'
        self.environ = environ
        self.path_info = path_info
        self.path = '%s/%s' % (script_name.rstrip('/'), path_info.lstrip('/'))
        # META is the environ dict itself (not a copy), with the
        # normalized path/script values written back into it.
        self.META = environ
        self.META['PATH_INFO'] = path_info
        self.META['SCRIPT_NAME'] = script_name
        self.method = environ['REQUEST_METHOD'].upper()
        # Honor an explicit charset in the Content-Type header, but only
        # if Python actually has a codec for it; otherwise keep the
        # default encoding.
        _, content_params = self._parse_content_type(self.META.get('CONTENT_TYPE', ''))
        if 'charset' in content_params:
            try:
                codecs.lookup(content_params['charset'])
            except LookupError:
                pass
            else:
                self.encoding = content_params['charset']
        self._post_parse_error = False
        # A missing or malformed CONTENT_LENGTH is treated as an empty body.
        try:
            content_length = int(self.environ.get('CONTENT_LENGTH'))
        except (ValueError, TypeError):
            content_length = 0
        # Cap reads of the request body at the declared Content-Length.
        self._stream = LimitedStream(self.environ['wsgi.input'], content_length)
        self._read_started = False
        self.resolver_match = None

    def _is_secure(self):
        # HTTPS is signalled by the server via the wsgi.url_scheme key.
        return 'wsgi.url_scheme' in self.environ and self.environ['wsgi.url_scheme'] == 'https'

    def _parse_content_type(self, ctype):
        """
        Media Types parsing according to RFC 2616, section 3.7.

        Returns the data type and parameters. For example:
        Input: "text/plain; charset=iso-8859-1"
        Output: ('text/plain', {'charset': 'iso-8859-1'})
        """
        content_type, _, params = ctype.partition(';')
        content_params = {}
        for parameter in params.split(';'):
            k, _, v = parameter.strip().partition('=')
            content_params[k] = v
        return content_type, content_params

    def _get_request(self):
        # Merged GET+POST view; built lazily and cached.
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request

    def _get_get(self):
        if not hasattr(self, '_get'):
            # The WSGI spec says 'QUERY_STRING' may be absent.
            raw_query_string = self.environ.get('QUERY_STRING', str(''))
            if six.PY3:
                # Recover the server's bytestring (environ is ISO-8859-1
                # decoded on Python 3) and reinterpret it as UTF-8.
                raw_query_string = raw_query_string.encode('iso-8859-1').decode('utf-8')
            self._get = http.QueryDict(raw_query_string, encoding=self._encoding)
        return self._get

    def _set_get(self, get):
        self._get = get

    def _get_post(self):
        # POST parsing is shared with FILES via _load_post_and_files().
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    def _get_cookies(self):
        if not hasattr(self, '_cookies'):
            raw_cookie = self.environ.get('HTTP_COOKIE', str(''))
            if six.PY3:
                # Same ISO-8859-1 -> UTF-8 round trip as for QUERY_STRING.
                raw_cookie = raw_cookie.encode('iso-8859-1').decode('utf-8')
            self._cookies = http.parse_cookie(raw_cookie)
        return self._cookies

    def _set_cookies(self, cookies):
        self._cookies = cookies

    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    REQUEST = property(_get_request)
+
+
class WSGIHandler(base.BaseHandler):
    """
    The WSGI application entry point: turns a WSGI (environ,
    start_response) call into a Django request/response cycle.
    """
    # Guards one-time middleware initialization across threads
    # (double-checked locking below).
    initLock = Lock()
    request_class = WSGIRequest

    def __call__(self, environ, start_response):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            with self.initLock:
                try:
                    # Check that middleware is still uninitialised.
                    if self._request_middleware is None:
                        self.load_middleware()
                except:
                    # Unload whatever middleware we got
                    self._request_middleware = None
                    raise

        set_script_prefix(base.get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            request = self.request_class(environ)
        except UnicodeDecodeError:
            # Request data that is not valid UTF-8 yields a 400, not a 500.
            logger.warning('Bad Request (UnicodeDecodeError)',
                exc_info=sys.exc_info(),
                extra={
                    'status_code': 400,
                }
            )
            response = http.HttpResponseBadRequest()
        else:
            response = self.get_response(request)

        response._handler_class = self.__class__

        status = '%s %s' % (response.status_code, response.reason_phrase)
        # WSGI requires native strings for header names and values.
        response_headers = [(str(k), str(v)) for k, v in response.items()]
        for c in response.cookies.values():
            response_headers.append((str('Set-Cookie'), str(c.output(header=''))))
        start_response(force_str(status), response_headers)
        return response
diff --git a/lib/python2.7/site-packages/django/core/mail/__init__.py b/lib/python2.7/site-packages/django/core/mail/__init__.py
new file mode 100644
index 0000000..fcff803
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/__init__.py
@@ -0,0 +1,99 @@
+"""
+Tools for sending email.
+"""
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.utils.module_loading import import_by_path
+
+# Imported for backwards compatibility, and for the sake
+# of a cleaner namespace. These symbols used to be in
+# django/core/mail.py before the introduction of email
+# backends and the subsequent reorganization (See #10355)
+from django.core.mail.utils import CachedDnsName, DNS_NAME
+from django.core.mail.message import (
+ EmailMessage, EmailMultiAlternatives,
+ SafeMIMEText, SafeMIMEMultipart,
+ DEFAULT_ATTACHMENT_MIME_TYPE, make_msgid,
+ BadHeaderError, forbid_multi_line_headers)
+
+
def get_connection(backend=None, fail_silently=False, **kwds):
    """Load an email backend and return an instance of it.

    If backend is None (default) settings.EMAIL_BACKEND is used.

    Both fail_silently and other keyword arguments are used in the
    constructor of the backend.
    """
    backend_path = backend or settings.EMAIL_BACKEND
    backend_class = import_by_path(backend_path)
    return backend_class(fail_silently=fail_silently, **kwds)
+
+
def send_mail(subject, message, from_email, recipient_list,
              fail_silently=False, auth_user=None, auth_password=None,
              connection=None):
    """
    Easy wrapper for sending a single message to a recipient list. All members
    of the recipient list will see the other recipients in the 'To' field.

    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.

    Note: The API for this method is frozen. New code wanting to extend the
    functionality should use the EmailMessage class directly.
    """
    connection = connection or get_connection(
        username=auth_user,
        password=auth_password,
        fail_silently=fail_silently)
    mail = EmailMessage(subject, message, from_email, recipient_list,
                        connection=connection)
    return mail.send()
+
+
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
                   auth_password=None, connection=None):
    """
    Given a datatuple of (subject, message, from_email, recipient_list), sends
    each message to each recipient list. Returns the number of emails sent.

    If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
    If auth_user and auth_password are set, they're used to log in.
    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.

    Note: The API for this method is frozen. New code wanting to extend the
    functionality should use the EmailMessage class directly.
    """
    connection = connection or get_connection(
        username=auth_user,
        password=auth_password,
        fail_silently=fail_silently)
    # Build all the messages up front, then hand them to the backend in
    # one call so a single connection can be reused.
    messages = []
    for subject, message, sender, recipient in datatuple:
        messages.append(EmailMessage(subject, message, sender, recipient,
                                     connection=connection))
    return connection.send_messages(messages)
+
+
def mail_admins(subject, message, fail_silently=False, connection=None,
                html_message=None):
    """Sends a message to the admins, as defined by the ADMINS setting."""
    if not settings.ADMINS:
        return
    # ADMINS is a sequence of (name, email) pairs; we only need the emails.
    recipients = [admin[1] for admin in settings.ADMINS]
    mail = EmailMultiAlternatives(
        '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
        message, settings.SERVER_EMAIL, recipients,
        connection=connection)
    if html_message:
        mail.attach_alternative(html_message, 'text/html')
    mail.send(fail_silently=fail_silently)
+
+
def mail_managers(subject, message, fail_silently=False, connection=None,
                  html_message=None):
    """Sends a message to the managers, as defined by the MANAGERS setting."""
    if not settings.MANAGERS:
        return
    # MANAGERS is a sequence of (name, email) pairs; we only need the emails.
    recipients = [manager[1] for manager in settings.MANAGERS]
    mail = EmailMultiAlternatives(
        '%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
        message, settings.SERVER_EMAIL, recipients,
        connection=connection)
    if html_message:
        mail.attach_alternative(html_message, 'text/html')
    mail.send(fail_silently=fail_silently)
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/__init__.py b/lib/python2.7/site-packages/django/core/mail/backends/__init__.py
new file mode 100644
index 0000000..5973b49
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/__init__.py
@@ -0,0 +1 @@
+# Mail backends shipped with Django.
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/base.py b/lib/python2.7/site-packages/django/core/mail/backends/base.py
new file mode 100644
index 0000000..9a30928
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/base.py
@@ -0,0 +1,39 @@
+"""Base email backend class."""
+
class BaseEmailBackend(object):
    """
    Abstract base class for email backend implementations.

    Concrete backends must implement send_messages(); open() and close()
    are optional connection-management hooks.
    """
    def __init__(self, fail_silently=False, **kwargs):
        # When fail_silently is set, backends are expected to swallow
        # send-time errors instead of propagating them.
        self.fail_silently = fail_silently

    def open(self):
        """
        Open a network connection.

        Backends that talk to a server override this; it exists so that
        applications can force a single connection to be reused across
        several sends (see the SMTP backend's send_messages() for a
        reference implementation). Tracking connection state is entirely
        the backend's responsibility.

        The default implementation does nothing.
        """

    def close(self):
        """Close a network connection; no-op by default."""

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent.
        """
        raise NotImplementedError
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/console.py b/lib/python2.7/site-packages/django/core/mail/backends/console.py
new file mode 100644
index 0000000..1175ed1
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/console.py
@@ -0,0 +1,41 @@
+"""
+Email backend that writes messages to console instead of sending them.
+"""
+import sys
+import threading
+
+from django.core.mail.backends.base import BaseEmailBackend
+from django.utils import six
+
class EmailBackend(BaseEmailBackend):
    """Email backend that dumps each message to a text stream (stdout by
    default) instead of sending it."""

    def __init__(self, *args, **kwargs):
        self.stream = kwargs.pop('stream', sys.stdout)
        self._lock = threading.RLock()
        super(EmailBackend, self).__init__(*args, **kwargs)

    def write_message(self, message):
        msg = message.message()
        payload = msg.as_bytes()
        if six.PY3:
            # as_bytes() yields bytes; decode with the message's own
            # charset (UTF-8 when none is set) so it can be written to a
            # text stream.
            charset = msg.get_charset()
            output_charset = charset.get_output_charset() if charset else 'utf-8'
            payload = payload.decode(output_charset)
        self.stream.write('%s\n' % payload)
        self.stream.write('-' * 79)
        self.stream.write('\n')

    def send_messages(self, email_messages):
        """Write all messages to the stream in a thread-safe way."""
        if not email_messages:
            return
        with self._lock:
            try:
                opened_here = self.open()
                for email_message in email_messages:
                    self.write_message(email_message)
                    self.stream.flush()  # flush after each message
                if opened_here:
                    self.close()
            except:
                if not self.fail_silently:
                    raise
        return len(email_messages)
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/dummy.py b/lib/python2.7/site-packages/django/core/mail/backends/dummy.py
new file mode 100644
index 0000000..273aa0d
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/dummy.py
@@ -0,0 +1,9 @@
+"""
+Dummy email backend that does nothing.
+"""
+
+from django.core.mail.backends.base import BaseEmailBackend
+
class EmailBackend(BaseEmailBackend):
    """Backend that silently discards every message."""

    def send_messages(self, email_messages):
        # Pretend everything was sent: report the count, drop the data.
        return len(email_messages)
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/filebased.py b/lib/python2.7/site-packages/django/core/mail/backends/filebased.py
new file mode 100644
index 0000000..d72a547
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/filebased.py
@@ -0,0 +1,65 @@
+"""Email backend that writes messages to a file."""
+
+import datetime
+import os
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
+from django.utils import six
+
class EmailBackend(ConsoleEmailBackend):
    """
    Email backend that appends messages to a log file in EMAIL_FILE_PATH
    (or a 'file_path' keyword argument), one file per backend instance.
    Reuses the console backend's locking/send loop with the stream
    pointed at a file opened in binary append mode.
    """
    def __init__(self, *args, **kwargs):
        self._fname = None
        if 'file_path' in kwargs:
            self.file_path = kwargs.pop('file_path')
        else:
            self.file_path = getattr(settings, 'EMAIL_FILE_PATH',None)
        # Make sure self.file_path is a string.
        if not isinstance(self.file_path, six.string_types):
            raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it exists.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured('Path for saving email messages exists, but is not a directory: %s' % self.file_path)
        # Try to create it, if it does not exist.
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError as err:
                raise ImproperlyConfigured('Could not create directory for saving email messages: %s (%s)' % (self.file_path, err))
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)

    def write_message(self, message):
        # The file is opened in binary mode, so write raw bytes (unlike
        # the console backend, which writes decoded text).
        self.stream.write(message.message().as_bytes() + b'\n')
        self.stream.write(b'-' * 79)
        self.stream.write(b'\n')

    def _get_filename(self):
        """Return a unique file name."""
        # Timestamp plus the instance's id() keeps names unique per
        # backend instance; computed once and cached.
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname

    def open(self):
        # Returns True when this call created the stream (the caller is
        # then responsible for closing it), False when already open.
        if self.stream is None:
            self.stream = open(self._get_filename(), 'ab')
            return True
        return False

    def close(self):
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            # Always drop the reference so a later open() reopens the file.
            self.stream = None
+
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/locmem.py b/lib/python2.7/site-packages/django/core/mail/backends/locmem.py
new file mode 100644
index 0000000..6826d09
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/locmem.py
@@ -0,0 +1,26 @@
+"""
+Backend for test environment.
+"""
+
+from django.core import mail
+from django.core.mail.backends.base import BaseEmailBackend
+
class EmailBackend(BaseEmailBackend):
    """An email backend for use during test sessions.

    Instead of sending messages over the wire, this backend appends them
    to a dummy outbox, accessible as django.core.mail.outbox.
    """
    def __init__(self, *args, **kwargs):
        super(EmailBackend, self).__init__(*args, **kwargs)
        # Create the shared outbox lazily so every instance appends to
        # the same module-level list.
        if not hasattr(mail, 'outbox'):
            mail.outbox = []

    def send_messages(self, messages):
        """Redirect messages to the dummy outbox"""
        for msg in messages:
            msg.message()  # trigger header validation eagerly
        mail.outbox.extend(messages)
        return len(messages)
diff --git a/lib/python2.7/site-packages/django/core/mail/backends/smtp.py b/lib/python2.7/site-packages/django/core/mail/backends/smtp.py
new file mode 100644
index 0000000..57ee967
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/backends/smtp.py
@@ -0,0 +1,115 @@
+"""SMTP email backend class."""
+import smtplib
+import ssl
+import threading
+
+from django.conf import settings
+from django.core.mail.backends.base import BaseEmailBackend
+from django.core.mail.utils import DNS_NAME
+from django.core.mail.message import sanitize_address
+
+
class EmailBackend(BaseEmailBackend):
    """
    A wrapper that manages the SMTP network connection.

    Connection parameters default to the EMAIL_* settings; a single
    connection is reused across one send_messages() call and serialized
    with an RLock.
    """
    def __init__(self, host=None, port=None, username=None, password=None,
                 use_tls=None, fail_silently=False, **kwargs):
        super(EmailBackend, self).__init__(fail_silently=fail_silently)
        self.host = host or settings.EMAIL_HOST
        self.port = port or settings.EMAIL_PORT
        # username/password use explicit None checks so that an empty
        # string deliberately disables the settings fallback.
        if username is None:
            self.username = settings.EMAIL_HOST_USER
        else:
            self.username = username
        if password is None:
            self.password = settings.EMAIL_HOST_PASSWORD
        else:
            self.password = password
        if use_tls is None:
            self.use_tls = settings.EMAIL_USE_TLS
        else:
            self.use_tls = use_tls
        self.connection = None
        self._lock = threading.RLock()

    def open(self):
        """
        Ensures we have a connection to the email server. Returns whether or
        not a new connection was required (True or False).
        """
        if self.connection:
            # Nothing to do if the connection is already open.
            return False
        try:
            # If local_hostname is not specified, socket.getfqdn() gets used.
            # For performance, we use the cached FQDN for local_hostname.
            self.connection = smtplib.SMTP(self.host, self.port,
                                           local_hostname=DNS_NAME.get_fqdn())
            if self.use_tls:
                self.connection.ehlo()
                self.connection.starttls()
                self.connection.ehlo()
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit when fail_silently is set — confirm intended.
            if not self.fail_silently:
                raise
            # Implicitly returns None here when failing silently.

    def close(self):
        """Closes the connection to the email server."""
        if self.connection is None:
            return
        try:
            try:
                self.connection.quit()
            except (ssl.SSLError, smtplib.SMTPServerDisconnected):
                # This happens when calling quit() on a TLS connection
                # sometimes, or when the connection was already disconnected
                # by the server.
                self.connection.close()
            except:
                if self.fail_silently:
                    return
                raise
        finally:
            # Always drop the reference so open() can reconnect later.
            self.connection = None

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent.
        """
        if not email_messages:
            # NOTE(review): returns None (not 0) for an empty list — callers
            # relying on a numeric result should be aware.
            return
        with self._lock:
            new_conn_created = self.open()
            if not self.connection:
                # We failed silently on open().
                # Trying to send would be pointless.
                return
            num_sent = 0
            for message in email_messages:
                sent = self._send(message)
                if sent:
                    num_sent += 1
            if new_conn_created:
                self.close()
        return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending."""
        if not email_message.recipients():
            return False
        # Sanitize addresses per the message's encoding before handing
        # them to smtplib.
        from_email = sanitize_address(email_message.from_email, email_message.encoding)
        recipients = [sanitize_address(addr, email_message.encoding)
                      for addr in email_message.recipients()]
        message = email_message.message()
        try:
            self.connection.sendmail(from_email, recipients, message.as_bytes())
        except smtplib.SMTPException:
            if not self.fail_silently:
                raise
            return False
        return True
diff --git a/lib/python2.7/site-packages/django/core/mail/message.py b/lib/python2.7/site-packages/django/core/mail/message.py
new file mode 100644
index 0000000..95762ff
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/message.py
@@ -0,0 +1,392 @@
+from __future__ import unicode_literals
+
+import mimetypes
+import os
+import random
+import sys
+import time
+from email import generator
+from email import charset as Charset, encoders as Encoders
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+from email.mime.base import MIMEBase
+from email.header import Header
+from email.utils import formatdate, getaddresses, formataddr, parseaddr
+
+from django.conf import settings
+from django.core.mail.utils import DNS_NAME
+from django.utils.encoding import force_text
+from django.utils import six
+
+
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
utf8_charset = Charset.Charset('utf-8')
utf8_charset.body_encoding = None  # Python's email package defaults to BASE64


# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed from the filename).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
+
+
class BadHeaderError(ValueError):
    """Raised when an email header value contains a newline, which would
    otherwise allow header injection."""
+
+
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
    """Returns a string suitable for RFC 2822 compliant Message-ID, e.g:

    <20020201195627.33539.96671@nightshade.la.mastaler.com>

    Optional idstring if given is a string used to strengthen the
    uniqueness of the message id.
    """
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    try:
        pid = os.getpid()
    except AttributeError:
        # No getpid() in Jython, for example.
        pid = 1
    randint = random.randrange(100000)
    idstring = '' if idstring is None else '.' + idstring
    # DNS_NAME lazily resolves (and caches) the local FQDN.
    return '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, DNS_NAME)
+
+
# Header names that contain structured address data (RFC #5322)
ADDRESS_HEADERS = {
    'from',
    'sender',
    'reply-to',
    'to',
    'cc',
    'bcc',
    'resent-from',
    'resent-sender',
    'resent-to',
    'resent-cc',
    'resent-bcc',
}
+
+
def forbid_multi_line_headers(name, val, encoding):
    """
    Validate and encode a single email header, rejecting values that span
    multiple lines (which would otherwise permit header injection).
    Returns a (native-str name, encoded value) pair.
    """
    encoding = encoding or settings.DEFAULT_CHARSET
    val = force_text(val)
    if '\n' in val or '\r' in val:
        raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
    try:
        val.encode('ascii')
    except UnicodeEncodeError:
        # Non-ASCII: address headers are sanitized address-by-address,
        # everything else is RFC 2047-encoded wholesale.
        if name.lower() in ADDRESS_HEADERS:
            parsed_addresses = getaddresses((val,))
            val = ', '.join(sanitize_address(addr, encoding)
                            for addr in parsed_addresses)
        else:
            val = Header(val, encoding).encode()
    else:
        # Pure-ASCII subjects still go through Header() so long lines
        # get folded properly.
        if name.lower() == 'subject':
            val = Header(val).encode()
    return str(name), val
+
+
def sanitize_address(addr, encoding):
    """
    Format an address (an "addr" string or a (name, address) pair) as an
    RFC 2822 'Name <addr>' string, RFC 2047-encoding a non-ASCII display
    name and IDNA-encoding a non-ASCII domain.
    """
    if isinstance(addr, six.string_types):
        addr = parseaddr(force_text(addr))
    nm, addr = addr
    # This try-except clause is needed on Python 3 < 3.2.4
    # http://bugs.python.org/issue14291
    try:
        nm = Header(nm, encoding).encode()
    except UnicodeEncodeError:
        # Fall back to UTF-8 when the requested charset can't represent
        # the display name.
        nm = Header(nm, 'utf-8').encode()
    try:
        addr.encode('ascii')
    except UnicodeEncodeError:  # IDN
        # Non-ASCII address: encode the local part per RFC 2047 and the
        # domain with IDNA so standard MTAs can route it.
        if '@' in addr:
            localpart, domain = addr.split('@', 1)
            localpart = str(Header(localpart, encoding))
            domain = domain.encode('idna').decode('ascii')
            addr = '@'.join([localpart, domain])
        else:
            addr = Header(addr, encoding).encode()
    return formataddr((nm, addr))
+
+
class MIMEMixin():
    """
    Mixin overriding as_string()/as_bytes() for MIME classes so that
    lines starting with 'From ' are not mangled. Must appear before the
    MIME base class in the bases list so its methods win in the MRO.
    """
    def as_string(self, unixfrom=False):
        """Return the entire formatted message as a string.
        Optional `unixfrom' when True, means include the Unix From_ envelope
        header.

        This overrides the default as_string() implementation to not mangle
        lines that begin with 'From '. See bug #13433 for details.
        """
        fp = six.StringIO()
        g = generator.Generator(fp, mangle_from_=False)
        g.flatten(self, unixfrom=unixfrom)
        return fp.getvalue()

    if six.PY2:
        # Python 2's email API has no separate bytes path; as_string()
        # already produces a bytestring there.
        as_bytes = as_string
    else:
        def as_bytes(self, unixfrom=False):
            """Return the entire formatted message as bytes.
            Optional `unixfrom' when True, means include the Unix From_ envelope
            header.

            This overrides the default as_bytes() implementation to not mangle
            lines that begin with 'From '. See bug #13433 for details.
            """
            fp = six.BytesIO()
            g = generator.BytesGenerator(fp, mangle_from_=False)
            g.flatten(self, unixfrom=unixfrom)
            return fp.getvalue()
+
+
class SafeMIMEText(MIMEMixin, MIMEText):
    """
    MIMEText subclass that rejects multi-line header values (header
    injection) and special-cases UTF-8 so bodies are not BASE64-encoded.
    """

    def __init__(self, text, subtype, charset):
        # Remember the charset so header values can be encoded in
        # __setitem__.
        self.encoding = charset
        if charset == 'utf-8':
            # Unfortunately, Python doesn't support setting a Charset instance
            # as MIMEText init parameter (http://bugs.python.org/issue16324).
            # We do it manually and trigger re-encoding of the payload.
            MIMEText.__init__(self, text, subtype, None)
            del self['Content-Transfer-Encoding']
            # Workaround for versions without http://bugs.python.org/issue19063
            if (3, 2) < sys.version_info < (3, 3, 4):
                payload = text.encode(utf8_charset.output_charset)
                self._payload = payload.decode('ascii', 'surrogateescape')
                self.set_charset(utf8_charset)
            else:
                self.set_payload(text, utf8_charset)
            self.replace_header('Content-Type', 'text/%s; charset="%s"' % (subtype, charset))
        else:
            MIMEText.__init__(self, text, subtype, charset)

    def __setitem__(self, name, val):
        # Route every header assignment through the anti-injection check.
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEText.__setitem__(self, name, val)
+
+
class SafeMIMEMultipart(MIMEMixin, MIMEMultipart):
    """MIMEMultipart subclass that rejects multi-line header values to
    prevent header injection."""

    def __init__(self, _subtype='mixed', boundary=None, _subparts=None, encoding=None, **_params):
        # Remember the charset so header values can be encoded in
        # __setitem__.
        self.encoding = encoding
        MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params)

    def __setitem__(self, name, val):
        # Route every header assignment through the anti-injection check.
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEMultipart.__setitem__(self, name, val)
+
+
class EmailMessage(object):
    """
    A container for email information.

    Collects subject/body/recipients/attachments and renders them into a
    MIME message via message(); actual delivery is delegated to the
    connection (backend) returned by get_connection().
    """
    content_subtype = 'plain'   # MIME subtype of the body ('plain' text)
    mixed_subtype = 'mixed'     # multipart subtype used when attaching files
    encoding = None    # None => use settings default

    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
                 connection=None, attachments=None, headers=None, cc=None):
        """
        Initialize a single email message (which can be sent to multiple
        recipients).

        All strings used to create the message can be unicode strings
        (or UTF-8 bytestrings). The SafeMIMEText class will handle any
        necessary encoding conversions.
        """
        # to/cc/bcc must be sequences of addresses, never a bare string
        # (a string would silently iterate per-character).
        if to:
            assert not isinstance(to, six.string_types), '"to" argument must be a list or tuple'
            self.to = list(to)
        else:
            self.to = []
        if cc:
            assert not isinstance(cc, six.string_types), '"cc" argument must be a list or tuple'
            self.cc = list(cc)
        else:
            self.cc = []
        if bcc:
            assert not isinstance(bcc, six.string_types), '"bcc" argument must be a list or tuple'
            self.bcc = list(bcc)
        else:
            self.bcc = []
        self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
        self.subject = subject
        self.body = body
        self.attachments = attachments or []
        self.extra_headers = headers or {}
        self.connection = connection

    def get_connection(self, fail_silently=False):
        # Lazily create (and cache) a backend connection if none was
        # supplied at construction time.
        from django.core.mail import get_connection
        if not self.connection:
            self.connection = get_connection(fail_silently=fail_silently)
        return self.connection

    def message(self):
        """Build and return the MIME message object for this email."""
        encoding = self.encoding or settings.DEFAULT_CHARSET
        msg = SafeMIMEText(self.body, self.content_subtype, encoding)
        # Wrap the body with alternatives/attachments as needed.
        msg = self._create_message(msg)
        msg['Subject'] = self.subject
        msg['From'] = self.extra_headers.get('From', self.from_email)
        msg['To'] = self.extra_headers.get('To', ', '.join(self.to))
        if self.cc:
            msg['Cc'] = ', '.join(self.cc)

        # Email header names are case-insensitive (RFC 2045), so we have to
        # accommodate that when doing comparisons.
        header_names = [key.lower() for key in self.extra_headers]
        if 'date' not in header_names:
            msg['Date'] = formatdate()
        if 'message-id' not in header_names:
            msg['Message-ID'] = make_msgid()
        for name, value in self.extra_headers.items():
            if name.lower() in ('from', 'to'):  # From and To are already handled
                continue
            # NOTE(review): a 'Subject' key in extra_headers is not skipped
            # here, so it would be added alongside self.subject, producing
            # duplicate Subject headers — confirm intended.
            msg[name] = value
        return msg

    def recipients(self):
        """
        Returns a list of all recipients of the email (includes direct
        addressees as well as Cc and Bcc entries).
        """
        return self.to + self.cc + self.bcc

    def send(self, fail_silently=False):
        """Sends the email message."""
        if not self.recipients():
            # Don't bother creating the network connection if there's nobody to
            # send to.
            return 0
        return self.get_connection(fail_silently).send_messages([self])

    def attach(self, filename=None, content=None, mimetype=None):
        """
        Attaches a file with the given filename and content. The filename can
        be omitted and the mimetype is guessed, if not provided.

        If the first parameter is a MIMEBase subclass it is inserted directly
        into the resulting message attachments.
        """
        if isinstance(filename, MIMEBase):
            # A ready-made MIME object: content/mimetype must not be given.
            assert content == mimetype == None
            self.attachments.append(filename)
        else:
            assert content is not None
            # Stored as a (filename, content, mimetype) triple; converted
            # to a MIME object lazily in _create_attachment().
            self.attachments.append((filename, content, mimetype))

    def attach_file(self, path, mimetype=None):
        """Attaches a file from the filesystem."""
        filename = os.path.basename(path)
        with open(path, 'rb') as f:
            content = f.read()
        self.attach(filename, content, mimetype)

    def _create_message(self, msg):
        # Hook point for subclasses (e.g. EmailMultiAlternatives).
        return self._create_attachments(msg)

    def _create_attachments(self, msg):
        # When there are attachments, wrap the body in a multipart/mixed
        # container; otherwise return the body message unchanged.
        if self.attachments:
            encoding = self.encoding or settings.DEFAULT_CHARSET
            body_msg = msg
            msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding)
            if self.body:
                msg.attach(body_msg)
            for attachment in self.attachments:
                if isinstance(attachment, MIMEBase):
                    msg.attach(attachment)
                else:
                    msg.attach(self._create_attachment(*attachment))
        return msg

    def _create_mime_attachment(self, content, mimetype):
        """
        Converts the content, mimetype pair into a MIME attachment object.
        """
        basetype, subtype = mimetype.split('/', 1)
        if basetype == 'text':
            encoding = self.encoding or settings.DEFAULT_CHARSET
            attachment = SafeMIMEText(content, subtype, encoding)
        else:
            # Encode non-text attachments with base64.
            attachment = MIMEBase(basetype, subtype)
            attachment.set_payload(content)
            Encoders.encode_base64(attachment)
        return attachment

    def _create_attachment(self, filename, content, mimetype=None):
        """
        Converts the filename, content, mimetype triple into a MIME attachment
        object.
        """
        if mimetype is None:
            mimetype, _ = mimetypes.guess_type(filename)
            if mimetype is None:
                mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
        attachment = self._create_mime_attachment(content, mimetype)
        if filename:
            try:
                filename.encode('ascii')
            except UnicodeEncodeError:
                # Non-ASCII filenames use the RFC 2231 (charset, language,
                # value) triple form of the parameter.
                if six.PY2:
                    filename = filename.encode('utf-8')
                filename = ('utf-8', '', filename)
            attachment.add_header('Content-Disposition', 'attachment',
                                  filename=filename)
        return attachment
+
+
class EmailMultiAlternatives(EmailMessage):
    """
    A version of EmailMessage that makes it easy to send multipart/alternative
    messages. For example, including text and HTML versions of the text is
    made easier.
    """
    alternative_subtype = 'alternative'

    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
                 connection=None, attachments=None, headers=None, alternatives=None,
                 cc=None):
        """
        Initialize a single email message which may carry several
        alternative renderings of its body (e.g. plain text plus HTML).
        All strings may be unicode (or UTF-8 bytestrings); encoding is
        handled by SafeMIMEText.
        """
        super(EmailMultiAlternatives, self).__init__(
            subject, body, from_email, to, bcc, connection, attachments,
            headers, cc)
        self.alternatives = alternatives or []

    def attach_alternative(self, content, mimetype):
        """Attach an alternative content representation."""
        assert content is not None
        assert mimetype is not None
        self.alternatives.append((content, mimetype))

    def _create_message(self, msg):
        # Alternatives are nested inside the attachment container.
        return self._create_attachments(self._create_alternatives(msg))

    def _create_alternatives(self, msg):
        encoding = self.encoding or settings.DEFAULT_CHARSET
        if not self.alternatives:
            return msg
        # Wrap the body and each alternative in a multipart/alternative
        # container.
        container = SafeMIMEMultipart(_subtype=self.alternative_subtype,
                                      encoding=encoding)
        if self.body:
            container.attach(msg)
        for alternative in self.alternatives:
            container.attach(self._create_mime_attachment(*alternative))
        return container
diff --git a/lib/python2.7/site-packages/django/core/mail/utils.py b/lib/python2.7/site-packages/django/core/mail/utils.py
new file mode 100644
index 0000000..322a3a1
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/mail/utils.py
@@ -0,0 +1,19 @@
+"""
+Email message and email sending related helper functions.
+"""
+
+import socket
+
+
+# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
+# seconds, which slows down the restart of the server.
+class CachedDnsName(object):
+    """Lazily compute and cache the local fully-qualified domain name."""
+    def __str__(self):
+        return self.get_fqdn()
+
+    def get_fqdn(self):
+        # Only the first call pays the (possibly slow) socket.getfqdn() cost;
+        # the result is memoized on the instance.
+        if not hasattr(self, '_fqdn'):
+            self._fqdn = socket.getfqdn()
+        return self._fqdn
+
+# Module-level singleton; str(DNS_NAME) yields the cached hostname.
+DNS_NAME = CachedDnsName()
diff --git a/lib/python2.7/site-packages/django/core/management/__init__.py b/lib/python2.7/site-packages/django/core/management/__init__.py
new file mode 100644
index 0000000..5b0ad6c
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/__init__.py
@@ -0,0 +1,399 @@
+import collections
+import os
+import sys
+from optparse import OptionParser, NO_DEFAULT
+import imp
+
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management.base import BaseCommand, CommandError, handle_default_options
+from django.core.management.color import color_style
+from django.utils.importlib import import_module
+from django.utils import six
+
+# For backwards compatibility: get_version() used to be in this module.
+from django import get_version
+
+# A cache of loaded commands, so that call_command
+# doesn't have to reload every time it's called.
+_commands = None
+
+def find_commands(management_dir):
+    """
+    Given a path to a management directory, returns a list of all the command
+    names that are available.
+
+    Returns an empty list if no commands are defined.
+    """
+    command_dir = os.path.join(management_dir, 'commands')
+    try:
+        # Each non-private .py file in commands/ is one command; strip the
+        # '.py' suffix to obtain the command name.
+        return [f[:-3] for f in os.listdir(command_dir)
+                if not f.startswith('_') and f.endswith('.py')]
+    except OSError:
+        # No 'commands' subdirectory (or it is unreadable): no commands.
+        return []
+
+def find_management_module(app_name):
+    """
+    Determines the path to the management module for the given app_name,
+    without actually importing the application or the management module.
+
+    Raises ImportError if the management module cannot be found for any reason.
+    """
+    # Walk the dotted path one package at a time, ending with 'management'.
+    parts = app_name.split('.')
+    parts.append('management')
+    parts.reverse()
+    part = parts.pop()
+    path = None
+
+    # When using manage.py, the project module is added to the path,
+    # loaded, then removed from the path. This means that
+    # testproject.testapp.models can be loaded in future, even if
+    # testproject isn't in the path. When looking for the management
+    # module, we need look for the case where the project name is part
+    # of the app_name but the project directory itself isn't on the path.
+    try:
+        f, path, descr = imp.find_module(part, path)
+    except ImportError as e:
+        # Tolerate the failure only when the current working directory IS the
+        # project package; the search then continues with path=None.
+        if os.path.basename(os.getcwd()) != part:
+            raise e
+    else:
+        # imp.find_module may return an open file handle; close it, we only
+        # need the path.
+        if f:
+            f.close()
+
+    while parts:
+        part = parts.pop()
+        f, path, descr = imp.find_module(part, [path] if path else None)
+        if f:
+            f.close()
+    return path
+
+def load_command_class(app_name, name):
+    """
+    Given a command name and an application name, returns the Command
+    class instance. All errors raised by the import process
+    (ImportError, AttributeError) are allowed to propagate.
+    """
+    # Convention: every command module defines a class named 'Command'.
+    module = import_module('%s.management.commands.%s' % (app_name, name))
+    return module.Command()
+
+def get_commands():
+    """
+    Returns a dictionary mapping command names to their callback applications.
+
+    This works by looking for a management.commands package in django.core, and
+    in each installed application -- if a commands package exists, all commands
+    in that package are registered.
+
+    Core commands are always included. If a settings module has been
+    specified, user-defined commands will also be included.
+
+    The dictionary is in the format {command_name: app_name}. Key-value
+    pairs from this dictionary can then be used in calls to
+    load_command_class(app_name, command_name)
+
+    If a specific version of a command must be loaded (e.g., with the
+    startapp command), the instantiated module can be placed in the
+    dictionary in place of the application name.
+
+    The dictionary is cached on the first call and reused on subsequent
+    calls.
+    """
+    global _commands
+    if _commands is None:
+        # Core commands come first; app commands added later can shadow them.
+        _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])
+
+        # Find the installed apps
+        from django.conf import settings
+        try:
+            apps = settings.INSTALLED_APPS
+        except ImproperlyConfigured:
+            # Still useful for commands that do not require functional settings,
+            # like startproject or help
+            apps = []
+
+        # Find and load the management module for each installed app.
+        for app_name in apps:
+            try:
+                path = find_management_module(app_name)
+                _commands.update(dict([(name, app_name)
+                                       for name in find_commands(path)]))
+            except ImportError:
+                pass # No management module - ignore this app
+
+    return _commands
+
+def call_command(name, *args, **options):
+    """
+    Calls the given command, with the given options and args/kwargs.
+
+    This is the primary API you should use for calling specific commands.
+
+    Some examples:
+        call_command('syncdb')
+        call_command('shell', plain=True)
+        call_command('sqlall', 'myapp')
+    """
+    # Load the command object.
+    try:
+        app_name = get_commands()[name]
+    except KeyError:
+        raise CommandError("Unknown command: %r" % name)
+
+    if isinstance(app_name, BaseCommand):
+        # If the command is already loaded, use it directly.
+        klass = app_name
+    else:
+        klass = load_command_class(app_name, name)
+
+    # Grab out a list of defaults from the options. optparse does this for us
+    # when the script runs from the command line, but since call_command can
+    # be called programatically, we need to simulate the loading and handling
+    # of defaults (see #10080 for details).
+    defaults = {}
+    for opt in klass.option_list:
+        if opt.default is NO_DEFAULT:
+            defaults[opt.dest] = None
+        else:
+            defaults[opt.dest] = opt.default
+    # Caller-supplied keyword options override the optparse defaults.
+    defaults.update(options)
+
+    return klass.execute(*args, **defaults)
+
+class LaxOptionParser(OptionParser):
+    """
+    An option parser that doesn't raise any errors on unknown options.
+
+    This is needed because the --settings and --pythonpath options affect
+    the commands (and thus the options) that are available to the user.
+    """
+    def error(self, msg):
+        # Swallow parse errors: unknown options are legitimate here because
+        # the real subcommand parser hasn't run yet.
+        pass
+
+    def print_help(self):
+        """Output nothing.
+
+        The lax options are included in the normal option parser, so under
+        normal usage, we don't need to print the lax options.
+        """
+        pass
+
+    def print_lax_help(self):
+        """Output the basic options available to every command.
+
+        This just redirects to the default print_help() behavior.
+        """
+        OptionParser.print_help(self)
+
+    def _process_args(self, largs, rargs, values):
+        """
+        Overrides OptionParser._process_args to exclusively handle default
+        options and ignore args and other options.
+
+        This overrides the behavior of the super class, which stop parsing
+        at the first unrecognized option.
+        """
+        while rargs:
+            arg = rargs[0]
+            try:
+                if arg[0:2] == "--" and len(arg) > 2:
+                    # process a single long option (possibly with value(s))
+                    # the superclass code pops the arg off rargs
+                    self._process_long_opt(rargs, values)
+                elif arg[:1] == "-" and len(arg) > 1:
+                    # process a cluster of short options (possibly with
+                    # value(s) for the last one only)
+                    # the superclass code pops the arg off rargs
+                    self._process_short_opts(rargs, values)
+                else:
+                    # it's either a non-default option or an arg
+                    # either way, add it to the args list so we can keep
+                    # dealing with options
+                    del rargs[0]
+                    raise Exception
+            except:
+                # Deliberately broad: BadOptionError from the superclass and
+                # the Exception raised above both mean "keep arg as leftover".
+                largs.append(arg)
+
+class ManagementUtility(object):
+    """
+    Encapsulates the logic of the django-admin.py and manage.py utilities.
+
+    A ManagementUtility has a number of commands, which can be manipulated
+    by editing the self.commands dictionary.
+    """
+    def __init__(self, argv=None):
+        # Copy sys.argv so later mutation of the real argv can't affect us.
+        self.argv = argv or sys.argv[:]
+        self.prog_name = os.path.basename(self.argv[0])
+
+    def main_help_text(self, commands_only=False):
+        """
+        Returns the script's main help text, as a string.
+        """
+        if commands_only:
+            # Bare, sorted command names (used by 'help --commands').
+            usage = sorted(get_commands().keys())
+        else:
+            usage = [
+                "",
+                "Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
+                "",
+                "Available subcommands:",
+            ]
+            # Group command names by the app that provides them.
+            commands_dict = collections.defaultdict(lambda: [])
+            for name, app in six.iteritems(get_commands()):
+                if app == 'django.core':
+                    app = 'django'
+                else:
+                    app = app.rpartition('.')[-1]
+                commands_dict[app].append(name)
+            style = color_style()
+            for app in sorted(commands_dict.keys()):
+                usage.append("")
+                usage.append(style.NOTICE("[%s]" % app))
+                for name in sorted(commands_dict[app]):
+                    usage.append("    %s" % name)
+        return '\n'.join(usage)
+
+    def fetch_command(self, subcommand):
+        """
+        Tries to fetch the given subcommand, printing a message with the
+        appropriate command called from the command line (usually
+        "django-admin.py" or "manage.py") if it can't be found.
+        """
+        # Get commands outside of try block to prevent swallowing exceptions
+        commands = get_commands()
+        try:
+            app_name = commands[subcommand]
+        except KeyError:
+            sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" % \
+                (subcommand, self.prog_name))
+            sys.exit(1)
+        if isinstance(app_name, BaseCommand):
+            # If the command is already loaded, use it directly.
+            klass = app_name
+        else:
+            klass = load_command_class(app_name, subcommand)
+        return klass
+
+    def autocomplete(self):
+        """
+        Output completion suggestions for BASH.
+
+        The output of this function is passed to BASH's `COMREPLY` variable and
+        treated as completion suggestions. `COMREPLY` expects a space
+        separated string as the result.
+
+        The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
+        to get information about the cli input. Please refer to the BASH
+        man-page for more information about this variables.
+
+        Subcommand options are saved as pairs. A pair consists of
+        the long option string (e.g. '--exclude') and a boolean
+        value indicating if the option requires arguments. When printing to
+        stdout, a equal sign is appended to options which require arguments.
+
+        Note: If debugging this function, it is recommended to write the debug
+        output in a separate file. Otherwise the debug output will be treated
+        and formatted as potential completion suggestions.
+        """
+        # Don't complete if user hasn't sourced bash_completion file.
+        if 'DJANGO_AUTO_COMPLETE' not in os.environ:
+            return
+
+        cwords = os.environ['COMP_WORDS'].split()[1:]
+        cword = int(os.environ['COMP_CWORD'])
+
+        try:
+            # The word currently being completed (1-based COMP_CWORD index).
+            curr = cwords[cword-1]
+        except IndexError:
+            curr = ''
+
+        subcommands = list(get_commands()) + ['help']
+        options = [('--help', None)]
+
+        # subcommand
+        if cword == 1:
+            print(' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
+        # subcommand options
+        # special case: the 'help' subcommand has no options
+        elif cwords[0] in subcommands and cwords[0] != 'help':
+            subcommand_cls = self.fetch_command(cwords[0])
+            # special case: 'runfcgi' stores additional options as
+            # 'key=value' pairs
+            if cwords[0] == 'runfcgi':
+                from django.core.servers.fastcgi import FASTCGI_OPTIONS
+                options += [(k, 1) for k in FASTCGI_OPTIONS]
+            # special case: add the names of installed apps to options
+            elif cwords[0] in ('dumpdata', 'sql', 'sqlall', 'sqlclear',
+                    'sqlcustom', 'sqlindexes', 'sqlsequencereset', 'test'):
+                try:
+                    from django.conf import settings
+                    # Get the last part of the dotted path as the app name.
+                    options += [(a.split('.')[-1], 0) for a in settings.INSTALLED_APPS]
+                except ImportError:
+                    # Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
+                    # user will find out once they execute the command.
+                    pass
+            options += [(s_opt.get_opt_string(), s_opt.nargs) for s_opt in
+                        subcommand_cls.option_list]
+            # filter out previously specified options from available options
+            prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
+            options = [opt for opt in options if opt[0] not in prev_opts]
+
+            # filter options by current input
+            options = sorted([(k, v) for k, v in options if k.startswith(curr)])
+            for option in options:
+                opt_label = option[0]
+                # append '=' to options which require args
+                if option[1]:
+                    opt_label += '='
+                print(opt_label)
+        # Exit non-zero so BASH knows the process produced completions only.
+        sys.exit(1)
+
+    def execute(self):
+        """
+        Given the command-line arguments, this figures out which subcommand is
+        being run, creates a parser appropriate to that command, and runs it.
+        """
+        # Preprocess options to extract --settings and --pythonpath.
+        # These options could affect the commands that are available, so they
+        # must be processed early.
+        parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
+                                 version=get_version(),
+                                 option_list=BaseCommand.option_list)
+        self.autocomplete()
+        try:
+            options, args = parser.parse_args(self.argv)
+            handle_default_options(options)
+        except:
+            pass # Ignore any option errors at this point.
+
+        try:
+            subcommand = self.argv[1]
+        except IndexError:
+            subcommand = 'help' # Display help if no arguments were given.
+
+        if subcommand == 'help':
+            if len(args) <= 2:
+                parser.print_lax_help()
+                sys.stdout.write(self.main_help_text() + '\n')
+            elif args[2] == '--commands':
+                sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
+            else:
+                self.fetch_command(args[2]).print_help(self.prog_name, args[2])
+        elif subcommand == 'version':
+            sys.stdout.write(parser.get_version() + '\n')
+        # Special-cases: We want 'django-admin.py --version' and
+        # 'django-admin.py --help' to work, for backwards compatibility.
+        elif self.argv[1:] == ['--version']:
+            # LaxOptionParser already takes care of printing the version.
+            pass
+        elif self.argv[1:] in (['--help'], ['-h']):
+            parser.print_lax_help()
+            sys.stdout.write(self.main_help_text() + '\n')
+        else:
+            # Normal path: delegate to the subcommand's own parser/runner.
+            self.fetch_command(subcommand).run_from_argv(self.argv)
+
+def execute_from_command_line(argv=None):
+    """
+    A simple method that runs a ManagementUtility.
+    """
+    # Entry point used by manage.py and django-admin.py.
+    utility = ManagementUtility(argv)
+    utility.execute()
diff --git a/lib/python2.7/site-packages/django/core/management/base.py b/lib/python2.7/site-packages/django/core/management/base.py
new file mode 100644
index 0000000..74f3913
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/base.py
@@ -0,0 +1,422 @@
+"""
+Base classes for writing management commands (named commands which can
+be executed through ``django-admin.py`` or ``manage.py``).
+
+"""
+from __future__ import unicode_literals
+
+import os
+import sys
+
+from optparse import make_option, OptionParser
+
+import django
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management.color import color_style
+from django.utils.encoding import force_str
+from django.utils.six import StringIO
+
+
+class CommandError(Exception):
+    """
+    Exception class indicating a problem while executing a management
+    command.
+
+    If this exception is raised during the execution of a management
+    command, it will be caught and turned into a nicely-printed error
+    message to the appropriate output stream (i.e., stderr); as a
+    result, raising this exception (with a sensible description of the
+    error) is the preferred way to indicate that something has gone
+    wrong in the execution of a command.
+
+    """
+    # No extra state: the exception message alone describes the failure.
+    pass
+
+
+def handle_default_options(options):
+    """
+    Include any default options that all commands should accept here
+    so that ManagementUtility can handle them before searching for
+    user commands.
+
+    """
+    # --settings overrides the environment-provided settings module.
+    if options.settings:
+        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
+    # --pythonpath is prepended so it wins over existing sys.path entries.
+    if options.pythonpath:
+        sys.path.insert(0, options.pythonpath)
+
+
+class OutputWrapper(object):
+    """
+    Wrapper around stdout/stderr
+    """
+    def __init__(self, out, style_func=None, ending='\n'):
+        self._out = out
+        self.style_func = None
+        # Only apply styling when writing to a real terminal, so piped
+        # output remains plain text.
+        if hasattr(out, 'isatty') and out.isatty():
+            self.style_func = style_func
+        self.ending = ending
+
+    def __getattr__(self, name):
+        # Delegate everything else (flush, isatty, ...) to the wrapped stream.
+        return getattr(self._out, name)
+
+    def write(self, msg, style_func=None, ending=None):
+        ending = self.ending if ending is None else ending
+        if ending and not msg.endswith(ending):
+            msg += ending
+        # First non-None of: per-call style, instance style, identity.
+        style_func = [f for f in (style_func, self.style_func, lambda x:x)
+                      if f is not None][0]
+        self._out.write(force_str(style_func(msg)))
+
+
+class BaseCommand(object):
+    """
+    The base class from which all management commands ultimately
+    derive.
+
+    Use this class if you want access to all of the mechanisms which
+    parse the command-line arguments and work out what code to call in
+    response; if you don't need to change any of that behavior,
+    consider using one of the subclasses defined in this file.
+
+    If you are interested in overriding/customizing various aspects of
+    the command-parsing and -execution behavior, the normal flow works
+    as follows:
+
+    1. ``django-admin.py`` or ``manage.py`` loads the command class
+       and calls its ``run_from_argv()`` method.
+
+    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
+       an ``OptionParser`` for the arguments, parses them, performs
+       any environment changes requested by options like
+       ``pythonpath``, and then calls the ``execute()`` method,
+       passing the parsed arguments.
+
+    3. The ``execute()`` method attempts to carry out the command by
+       calling the ``handle()`` method with the parsed arguments; any
+       output produced by ``handle()`` will be printed to standard
+       output and, if the command is intended to produce a block of
+       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
+
+    4. If ``handle()`` or ``execute()`` raised any exception (e.g.
+       ``CommandError``), ``run_from_argv()`` will instead print an error
+       message to ``stderr``.
+
+    Thus, the ``handle()`` method is typically the starting point for
+    subclasses; many built-in commands and command types either place
+    all of their logic in ``handle()``, or perform some additional
+    parsing work in ``handle()`` and then delegate from it to more
+    specialized methods as needed.
+
+    Several attributes affect behavior at various steps along the way:
+
+    ``args``
+        A string listing the arguments accepted by the command,
+        suitable for use in help messages; e.g., a command which takes
+        a list of application names might set this to '<appname
+        appname ...>'.
+
+    ``can_import_settings``
+        A boolean indicating whether the command needs to be able to
+        import Django settings; if ``True``, ``execute()`` will verify
+        that this is possible before proceeding. Default value is
+        ``True``.
+
+    ``help``
+        A short description of the command, which will be printed in
+        help messages.
+
+    ``option_list``
+        This is the list of ``optparse`` options which will be fed
+        into the command's ``OptionParser`` for parsing arguments.
+
+    ``output_transaction``
+        A boolean indicating whether the command outputs SQL
+        statements; if ``True``, the output will automatically be
+        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
+        ``False``.
+
+    ``requires_model_validation``
+        A boolean; if ``True``, validation of installed models will be
+        performed prior to executing the command. Default value is
+        ``True``. To validate an individual application's models
+        rather than all applications' models, call
+        ``self.validate(app)`` from ``handle()``, where ``app`` is the
+        application's Python module.
+
+    ``leave_locale_alone``
+        A boolean indicating whether the locale set in settings should be
+        preserved during the execution of the command instead of being
+        forcibly set to 'en-us'.
+
+        Default value is ``False``.
+
+        Make sure you know what you are doing if you decide to change the value
+        of this option in your custom command if it creates database content
+        that is locale-sensitive and such content shouldn't contain any
+        translations (like it happens e.g. with django.contrib.auth
+        permissions) as making the locale differ from the de facto default
+        'en-us' might cause unintended effects.
+
+        This option can't be False when the can_import_settings option is set
+        to False too because attempting to set the locale needs access to
+        settings. This condition will generate a CommandError.
+    """
+    # Metadata about this command.
+    option_list = (
+        make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
+            type='choice', choices=['0', '1', '2', '3'],
+            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
+        make_option('--settings',
+            help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
+        make_option('--pythonpath',
+            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
+        make_option('--traceback', action='store_true',
+            help='Raise on exception'),
+    )
+    help = ''
+    args = ''
+
+    # Configuration shortcuts that alter various logic.
+    can_import_settings = True
+    requires_model_validation = True
+    output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
+    leave_locale_alone = False
+
+    def __init__(self):
+        # Resolved once per instance; a no-op style when colors are off.
+        self.style = color_style()
+
+    def get_version(self):
+        """
+        Return the Django version, which should be correct for all
+        built-in Django commands. User-supplied commands should
+        override this method.
+
+        """
+        return django.get_version()
+
+    def usage(self, subcommand):
+        """
+        Return a brief description of how to use this command, by
+        default from the attribute ``self.help``.
+
+        """
+        usage = '%%prog %s [options] %s' % (subcommand, self.args)
+        if self.help:
+            return '%s\n\n%s' % (usage, self.help)
+        else:
+            return usage
+
+    def create_parser(self, prog_name, subcommand):
+        """
+        Create and return the ``OptionParser`` which will be used to
+        parse the arguments to this command.
+
+        """
+        return OptionParser(prog=prog_name,
+                            usage=self.usage(subcommand),
+                            version=self.get_version(),
+                            option_list=self.option_list)
+
+    def print_help(self, prog_name, subcommand):
+        """
+        Print the help message for this command, derived from
+        ``self.usage()``.
+
+        """
+        parser = self.create_parser(prog_name, subcommand)
+        parser.print_help()
+
+    def run_from_argv(self, argv):
+        """
+        Set up any environment changes requested (e.g., Python path
+        and Django settings), then run this command. If the
+        command raises a ``CommandError``, intercept it and print it sensibly
+        to stderr. If the ``--traceback`` option is present or the raised
+        ``Exception`` is not ``CommandError``, raise it.
+        """
+        parser = self.create_parser(argv[0], argv[1])
+        options, args = parser.parse_args(argv[2:])
+        handle_default_options(options)
+        try:
+            self.execute(*args, **options.__dict__)
+        except Exception as e:
+            # Re-raise for --traceback or for anything that isn't the
+            # "friendly" CommandError.
+            if options.traceback or not isinstance(e, CommandError):
+                raise
+
+            # self.stderr is not guaranteed to be set here
+            stderr = getattr(self, 'stderr', OutputWrapper(sys.stderr, self.style.ERROR))
+            stderr.write('%s: %s' % (e.__class__.__name__, e))
+            sys.exit(1)
+
+    def execute(self, *args, **options):
+        """
+        Try to execute this command, performing model validation if
+        needed (as controlled by the attribute
+        ``self.requires_model_validation``, except if force-skipped).
+        """
+        # Callers (tests, call_command) may inject replacement streams.
+        self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
+        self.stderr = OutputWrapper(options.get('stderr', sys.stderr), self.style.ERROR)
+
+        if self.can_import_settings:
+            from django.conf import settings
+
+        saved_locale = None
+        if not self.leave_locale_alone:
+            # Only mess with locales if we can assume we have a working
+            # settings file, because django.utils.translation requires settings
+            # (The final saying about whether the i18n machinery is active will be
+            # found in the value of the USE_I18N setting)
+            if not self.can_import_settings:
+                raise CommandError("Incompatible values of 'leave_locale_alone' "
+                                   "(%s) and 'can_import_settings' (%s) command "
+                                   "options." % (self.leave_locale_alone,
+                                                 self.can_import_settings))
+            # Switch to US English, because django-admin.py creates database
+            # content like permissions, and those shouldn't contain any
+            # translations.
+            from django.utils import translation
+            saved_locale = translation.get_language()
+            translation.activate('en-us')
+
+        try:
+            if self.requires_model_validation and not options.get('skip_validation'):
+                self.validate()
+            output = self.handle(*args, **options)
+            if output:
+                if self.output_transaction:
+                    # This needs to be imported here, because it relies on
+                    # settings.
+                    from django.db import connections, DEFAULT_DB_ALIAS
+                    connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
+                    if connection.ops.start_transaction_sql():
+                        self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
+                self.stdout.write(output)
+                if self.output_transaction:
+                    self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;"))
+        finally:
+            # Always restore the caller's locale, even on error.
+            if saved_locale is not None:
+                translation.activate(saved_locale)
+
+    def validate(self, app=None, display_num_errors=False):
+        """
+        Validates the given app, raising CommandError for any errors.
+
+        If app is None, then this will validate all installed apps.
+
+        """
+        from django.core.management.validation import get_validation_errors
+        # Collect validation output into a buffer so it can be embedded in
+        # the CommandError message.
+        s = StringIO()
+        num_errors = get_validation_errors(s, app)
+        if num_errors:
+            s.seek(0)
+            error_text = s.read()
+            raise CommandError("One or more models did not validate:\n%s" % error_text)
+        if display_num_errors:
+            self.stdout.write("%s error%s found" % (num_errors, '' if num_errors == 1 else 's'))
+
+    def handle(self, *args, **options):
+        """
+        The actual logic of the command. Subclasses must implement
+        this method.
+
+        """
+        raise NotImplementedError()
+
+
+class AppCommand(BaseCommand):
+    """
+    A management command which takes one or more installed application
+    names as arguments, and does something with each of them.
+
+    Rather than implementing ``handle()``, subclasses must implement
+    ``handle_app()``, which will be called once for each application.
+
+    """
+    args = '<appname appname ...>'
+
+    def handle(self, *app_labels, **options):
+        from django.db import models
+        if not app_labels:
+            raise CommandError('Enter at least one appname.')
+        try:
+            # Resolve every label to its app module up front so one bad
+            # label fails the whole command before any work is done.
+            app_list = [models.get_app(app_label) for app_label in app_labels]
+        except (ImproperlyConfigured, ImportError) as e:
+            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
+        output = []
+        for app in app_list:
+            app_output = self.handle_app(app, **options)
+            if app_output:
+                output.append(app_output)
+        # Per-app outputs are joined into a single newline-separated string.
+        return '\n'.join(output)
+
+    def handle_app(self, app, **options):
+        """
+        Perform the command's actions for ``app``, which will be the
+        Python module corresponding to an application name given on
+        the command line.
+
+        """
+        raise NotImplementedError()
+
+
+class LabelCommand(BaseCommand):
+    """
+    A management command which takes one or more arbitrary arguments
+    (labels) on the command line, and does something with each of
+    them.
+
+    Rather than implementing ``handle()``, subclasses must implement
+    ``handle_label()``, which will be called once for each label.
+
+    If the arguments should be names of installed applications, use
+    ``AppCommand`` instead.
+
+    """
+    args = '<label label ...>'
+    # Noun used in the "Enter at least one ..." error message.
+    label = 'label'
+
+    def handle(self, *labels, **options):
+        if not labels:
+            raise CommandError('Enter at least one %s.' % self.label)
+
+        output = []
+        for label in labels:
+            label_output = self.handle_label(label, **options)
+            if label_output:
+                output.append(label_output)
+        # Per-label outputs are joined into a single newline-separated string.
+        return '\n'.join(output)
+
+    def handle_label(self, label, **options):
+        """
+        Perform the command's actions for ``label``, which will be the
+        string as given on the command line.
+
+        """
+        raise NotImplementedError()
+
+
+class NoArgsCommand(BaseCommand):
+    """
+    A command which takes no arguments on the command line.
+
+    Rather than implementing ``handle()``, subclasses must implement
+    ``handle_noargs()``; ``handle()`` itself is overridden to ensure
+    no arguments are passed to the command.
+
+    Attempting to pass arguments will raise ``CommandError``.
+
+    """
+    args = ''
+
+    def handle(self, *args, **options):
+        # Reject positional arguments outright; only options are accepted.
+        if args:
+            raise CommandError("Command doesn't accept any arguments")
+        return self.handle_noargs(**options)
+
+    def handle_noargs(self, **options):
+        """
+        Perform this command's actions.
+
+        """
+        raise NotImplementedError()
diff --git a/lib/python2.7/site-packages/django/core/management/color.py b/lib/python2.7/site-packages/django/core/management/color.py
new file mode 100644
index 0000000..8c7a87f
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/color.py
@@ -0,0 +1,50 @@
+"""
+Sets up the terminal color scheme.
+"""
+
+import os
+import sys
+
+from django.utils import termcolors
+
+def supports_color():
+    """
+    Returns True if the running system's terminal supports color, and False
+    otherwise.
+    """
+    # Windows consoles are treated as colorless here.
+    unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))
+    # isatty is not always implemented, #6223.
+    is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
+    if unsupported_platform or not is_a_tty:
+        return False
+    return True
+
+def color_style():
+    """Returns a Style object with the Django color scheme."""
+    if not supports_color():
+        style = no_style()
+    else:
+        # DJANGO_COLORS lets users override the palette from the environment.
+        DJANGO_COLORS = os.environ.get('DJANGO_COLORS', '')
+        color_settings = termcolors.parse_color_setting(DJANGO_COLORS)
+        if color_settings:
+            class dummy: pass
+            style = dummy()
+            # The nocolor palette has all available roles.
+            # Use that palette as the basis for populating
+            # the palette as defined in the environment.
+            for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
+                format = color_settings.get(role,{})
+                setattr(style, role, termcolors.make_style(**format))
+            # For backwards compatibility,
+            # set style for ERROR_OUTPUT == ERROR
+            style.ERROR_OUTPUT = style.ERROR
+        else:
+            style = no_style()
+    return style
+
+def no_style():
+    """Returns a Style object that has no colors."""
+    class dummy:
+        # Every attribute lookup yields an identity function, so any style
+        # role (NOTICE, ERROR, ...) simply returns its input unchanged.
+        def __getattr__(self, attr):
+            return lambda x: x
+    return dummy()
diff --git a/lib/python2.7/site-packages/django/core/management/commands/__init__.py b/lib/python2.7/site-packages/django/core/management/commands/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/management/commands/check.py b/lib/python2.7/site-packages/django/core/management/commands/check.py
new file mode 100644
index 0000000..05f48c8
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/check.py
@@ -0,0 +1,14 @@
+from __future__ import unicode_literals
+import warnings
+
+from django.core.checks.compatibility.base import check_compatibility
+from django.core.management.base import NoArgsCommand
+
+
+class Command(NoArgsCommand):
+    help = "Checks your configuration's compatibility with this version " + \
+        "of Django."
+
+    def handle_noargs(self, **options):
+        # Each compatibility problem is surfaced as a warning rather than an
+        # error, so every issue is reported in a single run.
+        for message in check_compatibility():
+            warnings.warn(message)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/cleanup.py b/lib/python2.7/site-packages/django/core/management/commands/cleanup.py
new file mode 100644
index 0000000..e158ebb
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/cleanup.py
@@ -0,0 +1,11 @@
+import warnings
+
+from django.contrib.sessions.management.commands import clearsessions
+
+
+class Command(clearsessions.Command):
+    """Deprecated alias: warns, then delegates to ``clearsessions``."""
+    def handle_noargs(self, **options):
+        warnings.warn(
+            "The `cleanup` command has been deprecated in favor of `clearsessions`.",
+            DeprecationWarning)
+        super(Command, self).handle_noargs(**options)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/compilemessages.py b/lib/python2.7/site-packages/django/core/management/commands/compilemessages.py
new file mode 100644
index 0000000..9e9df26
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/compilemessages.py
@@ -0,0 +1,71 @@
+from __future__ import unicode_literals
+
+import codecs
+import os
+from optparse import make_option
+
+from django.core.management.base import BaseCommand, CommandError
+from django.core.management.utils import find_command, popen_wrapper
+from django.utils._os import npath
+
+def has_bom(fn):
+ with open(fn, 'rb') as f:
+ sample = f.read(4)
+ return sample[:3] == b'\xef\xbb\xbf' or \
+ sample.startswith(codecs.BOM_UTF16_LE) or \
+ sample.startswith(codecs.BOM_UTF16_BE)
+
+def compile_messages(stdout, locale=None):
+ program = 'msgfmt'
+ if find_command(program) is None:
+ raise CommandError("Can't find %s. Make sure you have GNU gettext tools 0.15 or newer installed." % program)
+
+ basedirs = [os.path.join('conf', 'locale'), 'locale']
+ if os.environ.get('DJANGO_SETTINGS_MODULE'):
+ from django.conf import settings
+ basedirs.extend(settings.LOCALE_PATHS)
+
+ # Gather existing directories.
+ basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
+
+ if not basedirs:
+ raise CommandError("This script should be run from the Django Git checkout or your project or app tree, or with the settings module specified.")
+
+ for basedir in basedirs:
+ if locale:
+ dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locale]
+ else:
+ dirs = [basedir]
+ for ldir in dirs:
+ for dirpath, dirnames, filenames in os.walk(ldir):
+ for f in filenames:
+ if not f.endswith('.po'):
+ continue
+ stdout.write('processing file %s in %s\n' % (f, dirpath))
+ fn = os.path.join(dirpath, f)
+ if has_bom(fn):
+ raise CommandError("The %s file has a BOM (Byte Order Mark). Django only supports .po files encoded in UTF-8 and without any BOM." % fn)
+ pf = os.path.splitext(fn)[0]
+ args = [program, '--check-format', '-o', npath(pf + '.mo'), npath(pf + '.po')]
+ output, errors, status = popen_wrapper(args)
+ if status:
+ if errors:
+ msg = "Execution of %s failed: %s" % (program, errors)
+ else:
+ msg = "Execution of %s failed" % program
+ raise CommandError(msg)
+
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--locale', '-l', dest='locale', action='append',
+ help='locale(s) to process (e.g. de_AT). Default is to process all. Can be used multiple times.'),
+ )
+ help = 'Compiles .po files to .mo files for use with builtin gettext support.'
+
+ requires_model_validation = False
+ leave_locale_alone = True
+
+ def handle(self, **options):
+ locale = options.get('locale')
+ compile_messages(self.stdout, locale=locale)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/createcachetable.py b/lib/python2.7/site-packages/django/core/management/commands/createcachetable.py
new file mode 100644
index 0000000..d7ce3e9
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/createcachetable.py
@@ -0,0 +1,65 @@
+from optparse import make_option
+
+from django.core.cache.backends.db import BaseDatabaseCache
+from django.core.management.base import LabelCommand, CommandError
+from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
+from django.db.utils import DatabaseError
+from django.utils.encoding import force_text
+
+
+class Command(LabelCommand):
+ help = "Creates the table needed to use the SQL cache backend."
+ args = "<tablename>"
+ label = 'tablename'
+
+ option_list = LabelCommand.option_list + (
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database onto '
+ 'which the cache table will be installed. '
+ 'Defaults to the "default" database.'),
+ )
+
+ requires_model_validation = False
+
+ def handle_label(self, tablename, **options):
+ db = options.get('database')
+ cache = BaseDatabaseCache(tablename, {})
+ if not router.allow_syncdb(db, cache.cache_model_class):
+ return
+ connection = connections[db]
+ fields = (
+ # "key" is a reserved word in MySQL, so use "cache_key" instead.
+ models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
+ models.TextField(name='value'),
+ models.DateTimeField(name='expires', db_index=True),
+ )
+ table_output = []
+ index_output = []
+ qn = connection.ops.quote_name
+ for f in fields:
+ field_output = [qn(f.name), f.db_type(connection=connection)]
+ field_output.append("%sNULL" % ("NOT " if not f.null else ""))
+ if f.primary_key:
+ field_output.append("PRIMARY KEY")
+ elif f.unique:
+ field_output.append("UNIQUE")
+ if f.db_index:
+ unique = "UNIQUE " if f.unique else ""
+ index_output.append("CREATE %sINDEX %s ON %s (%s);" % \
+ (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
+ qn(f.name)))
+ table_output.append(" ".join(field_output))
+ full_statement = ["CREATE TABLE %s (" % qn(tablename)]
+ for i, line in enumerate(table_output):
+ full_statement.append(' %s%s' % (line, ',' if i < len(table_output)-1 else ''))
+ full_statement.append(');')
+ with transaction.commit_on_success_unless_managed():
+ curs = connection.cursor()
+ try:
+ curs.execute("\n".join(full_statement))
+ except DatabaseError as e:
+ raise CommandError(
+ "Cache table '%s' could not be created.\nThe error was: %s." %
+ (tablename, force_text(e)))
+ for statement in index_output:
+ curs.execute(statement)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/dbshell.py b/lib/python2.7/site-packages/django/core/management/commands/dbshell.py
new file mode 100644
index 0000000..7465920
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/dbshell.py
@@ -0,0 +1,28 @@
+from optparse import make_option
+
+from django.core.management.base import BaseCommand, CommandError
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(BaseCommand):
+ help = ("Runs the command-line client for specified database, or the "
+ "default database if none is provided.")
+
+ option_list = BaseCommand.option_list + (
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
+ 'open a shell. Defaults to the "default" database.'),
+ )
+
+ requires_model_validation = False
+
+ def handle(self, **options):
+ connection = connections[options.get('database')]
+ try:
+ connection.client.runshell()
+ except OSError:
+ # Note that we're assuming OSError means that the client program
+ # isn't installed. There's a possibility OSError would be raised
+ # for some other reason, in which case this error message would be
+ # inaccurate. Still, this message catches the common case.
+ raise CommandError('You appear not to have the %r program installed or on your path.' % \
+ connection.client.executable_name)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/diffsettings.py b/lib/python2.7/site-packages/django/core/management/commands/diffsettings.py
new file mode 100644
index 0000000..9e70e9a
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/diffsettings.py
@@ -0,0 +1,40 @@
+from optparse import make_option
+
+from django.core.management.base import NoArgsCommand
+
+def module_to_dict(module, omittable=lambda k: k.startswith('_')):
+ """Converts a module namespace to a Python dictionary."""
+ return dict((k, repr(v)) for k, v in module.__dict__.items() if not omittable(k))
+
+class Command(NoArgsCommand):
+ help = """Displays differences between the current settings.py and Django's
+ default settings. Settings that don't appear in the defaults are
+ followed by "###"."""
+
+ option_list = NoArgsCommand.option_list + (
+ make_option('--all', action='store_true', dest='all', default=False,
+ help='Display all settings, regardless of their value. '
+ 'Default values are prefixed by "###".'),
+ )
+
+ requires_model_validation = False
+
+ def handle_noargs(self, **options):
+ # Inspired by Postfix's "postconf -n".
+ from django.conf import settings, global_settings
+
+ # Because settings are imported lazily, we need to explicitly load them.
+ settings._setup()
+
+ user_settings = module_to_dict(settings._wrapped)
+ default_settings = module_to_dict(global_settings)
+
+ output = []
+ for key in sorted(user_settings):
+ if key not in default_settings:
+ output.append("%s = %s ###" % (key, user_settings[key]))
+ elif user_settings[key] != default_settings[key]:
+ output.append("%s = %s" % (key, user_settings[key]))
+ elif options['all']:
+ output.append("### %s = %s" % (key, user_settings[key]))
+ return '\n'.join(output)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/dumpdata.py b/lib/python2.7/site-packages/django/core/management/commands/dumpdata.py
new file mode 100644
index 0000000..e2a2f24
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/dumpdata.py
@@ -0,0 +1,214 @@
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management.base import BaseCommand, CommandError
+from django.core import serializers
+from django.db import router, DEFAULT_DB_ALIAS
+from django.utils.datastructures import SortedDict
+
+from optparse import make_option
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--format', default='json', dest='format',
+ help='Specifies the output serialization format for fixtures.'),
+ make_option('--indent', default=None, dest='indent', type='int',
+ help='Specifies the indent level to use when pretty-printing output'),
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a specific database to dump '
+ 'fixtures from. Defaults to the "default" database.'),
+ make_option('-e', '--exclude', dest='exclude',action='append', default=[],
+ help='An appname or appname.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).'),
+ make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
+ help='Use natural keys if they are available.'),
+ make_option('-a', '--all', action='store_true', dest='use_base_manager', default=False,
+ help="Use Django's base manager to dump all models stored in the database, including those that would otherwise be filtered or modified by a custom manager."),
+ make_option('--pks', dest='primary_keys', help="Only dump objects with "
+ "given primary keys. Accepts a comma separated list of keys. "
+ "This option will only work when you specify one model."),
+ )
+ help = ("Output the contents of the database as a fixture of the given "
+ "format (using each model's default manager unless --all is "
+ "specified).")
+ args = '[appname appname.ModelName ...]'
+
+ def handle(self, *app_labels, **options):
+ from django.db.models import get_app, get_apps, get_model
+
+ format = options.get('format')
+ indent = options.get('indent')
+ using = options.get('database')
+ excludes = options.get('exclude')
+ show_traceback = options.get('traceback')
+ use_natural_keys = options.get('use_natural_keys')
+ use_base_manager = options.get('use_base_manager')
+ pks = options.get('primary_keys')
+
+ if pks:
+ primary_keys = pks.split(',')
+ else:
+ primary_keys = []
+
+ excluded_apps = set()
+ excluded_models = set()
+ for exclude in excludes:
+ if '.' in exclude:
+ app_label, model_name = exclude.split('.', 1)
+ model_obj = get_model(app_label, model_name)
+ if not model_obj:
+ raise CommandError('Unknown model in excludes: %s' % exclude)
+ excluded_models.add(model_obj)
+ else:
+ try:
+ app_obj = get_app(exclude)
+ excluded_apps.add(app_obj)
+ except ImproperlyConfigured:
+ raise CommandError('Unknown app in excludes: %s' % exclude)
+
+ if len(app_labels) == 0:
+ if primary_keys:
+ raise CommandError("You can only use --pks option with one model")
+ app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
+ else:
+ if len(app_labels) > 1 and primary_keys:
+ raise CommandError("You can only use --pks option with one model")
+ app_list = SortedDict()
+ for label in app_labels:
+ try:
+ app_label, model_label = label.split('.')
+ try:
+ app = get_app(app_label)
+ except ImproperlyConfigured:
+ raise CommandError("Unknown application: %s" % app_label)
+ if app in excluded_apps:
+ continue
+ model = get_model(app_label, model_label)
+ if model is None:
+ raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
+
+ if app in app_list.keys():
+ if app_list[app] and model not in app_list[app]:
+ app_list[app].append(model)
+ else:
+ app_list[app] = [model]
+ except ValueError:
+ if primary_keys:
+ raise CommandError("You can only use --pks option with one model")
+ # This is just an app - no model qualifier
+ app_label = label
+ try:
+ app = get_app(app_label)
+ except ImproperlyConfigured:
+ raise CommandError("Unknown application: %s" % app_label)
+ if app in excluded_apps:
+ continue
+ app_list[app] = None
+
+ # Check that the serialization format exists; this is a shortcut to
+ # avoid collating all the objects and _then_ failing.
+ if format not in serializers.get_public_serializer_formats():
+ try:
+ serializers.get_serializer(format)
+ except serializers.SerializerDoesNotExist:
+ pass
+
+ raise CommandError("Unknown serialization format: %s" % format)
+
+ def get_objects():
+ # Collate the objects to be serialized.
+ for model in sort_dependencies(app_list.items()):
+ if model in excluded_models:
+ continue
+ if not model._meta.proxy and router.allow_syncdb(using, model):
+ if use_base_manager:
+ objects = model._base_manager
+ else:
+ objects = model._default_manager
+
+ queryset = objects.using(using).order_by(model._meta.pk.name)
+ if primary_keys:
+ queryset = queryset.filter(pk__in=primary_keys)
+ for obj in queryset.iterator():
+ yield obj
+
+ try:
+ self.stdout.ending = None
+ serializers.serialize(format, get_objects(), indent=indent,
+ use_natural_keys=use_natural_keys, stream=self.stdout)
+ except Exception as e:
+ if show_traceback:
+ raise
+ raise CommandError("Unable to serialize database: %s" % e)
+
+def sort_dependencies(app_list):
+ """Sort a list of app,modellist pairs into a single list of models.
+
+ The single list of models is sorted so that any model with a natural key
+ is serialized before a normal model, and any model with a natural key
+ dependency has its dependencies serialized first.
+ """
+ from django.db.models import get_model, get_models
+ # Process the list of models, and get the list of dependencies
+ model_dependencies = []
+ models = set()
+ for app, model_list in app_list:
+ if model_list is None:
+ model_list = get_models(app)
+
+ for model in model_list:
+ models.add(model)
+ # Add any explicitly defined dependencies
+ if hasattr(model, 'natural_key'):
+ deps = getattr(model.natural_key, 'dependencies', [])
+ if deps:
+ deps = [get_model(*d.split('.')) for d in deps]
+ else:
+ deps = []
+
+ # Now add a dependency for any FK or M2M relation with
+ # a model that defines a natural key
+ for field in model._meta.fields:
+ if hasattr(field.rel, 'to'):
+ rel_model = field.rel.to
+ if hasattr(rel_model, 'natural_key') and rel_model != model:
+ deps.append(rel_model)
+ for field in model._meta.many_to_many:
+ rel_model = field.rel.to
+ if hasattr(rel_model, 'natural_key') and rel_model != model:
+ deps.append(rel_model)
+ model_dependencies.append((model, deps))
+
+ model_dependencies.reverse()
+ # Now sort the models to ensure that dependencies are met. This
+ # is done by repeatedly iterating over the input list of models.
+ # If all the dependencies of a given model are in the final list,
+ # that model is promoted to the end of the final list. This process
+ # continues until the input list is empty, or we do a full iteration
+ # over the input models without promoting a model to the final list.
+ # If we do a full iteration without a promotion, that means there are
+ # circular dependencies in the list.
+ model_list = []
+ while model_dependencies:
+ skipped = []
+ changed = False
+ while model_dependencies:
+ model, deps = model_dependencies.pop()
+
+ # If all of the models in the dependency list are either already
+ # on the final model list, or not on the original serialization list,
+ # then we've found another model with all its dependencies satisfied.
+ found = True
+ for candidate in ((d not in models or d in model_list) for d in deps):
+ if not candidate:
+ found = False
+ if found:
+ model_list.append(model)
+ changed = True
+ else:
+ skipped.append((model, deps))
+ if not changed:
+ raise CommandError("Can't resolve dependencies for %s in serialized app list." %
+ ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
+ for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))
+ )
+ model_dependencies = skipped
+
+ return model_list
diff --git a/lib/python2.7/site-packages/django/core/management/commands/flush.py b/lib/python2.7/site-packages/django/core/management/commands/flush.py
new file mode 100644
index 0000000..95dd634
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/flush.py
@@ -0,0 +1,101 @@
+import sys
+from optparse import make_option
+
+from django.conf import settings
+from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
+from django.core.management import call_command
+from django.core.management.base import NoArgsCommand, CommandError
+from django.core.management.color import no_style
+from django.core.management.sql import sql_flush, emit_post_sync_signal
+from django.utils.importlib import import_module
+from django.utils.six.moves import input
+from django.utils import six
+
+
+class Command(NoArgsCommand):
+ option_list = NoArgsCommand.option_list + (
+ make_option('--noinput', action='store_false', dest='interactive', default=True,
+ help='Tells Django to NOT prompt the user for input of any kind.'),
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database to flush. '
+ 'Defaults to the "default" database.'),
+ make_option('--no-initial-data', action='store_false', dest='load_initial_data', default=True,
+ help='Tells Django not to load any initial data after database synchronization.'),
+ )
+ help = ('Returns the database to the state it was in immediately after '
+ 'syncdb was executed. This means that all data will be removed '
+ 'from the database, any post-synchronization handlers will be '
+ 're-executed, and the initial_data fixture will be re-installed.')
+
+ def handle_noargs(self, **options):
+ db = options.get('database')
+ connection = connections[db]
+ verbosity = int(options.get('verbosity'))
+ interactive = options.get('interactive')
+ # The following are stealth options used by Django's internals.
+ reset_sequences = options.get('reset_sequences', True)
+ allow_cascade = options.get('allow_cascade', False)
+ inhibit_post_syncdb = options.get('inhibit_post_syncdb', False)
+
+ self.style = no_style()
+
+ # Import the 'management' module within each installed app, to register
+ # dispatcher events.
+ for app_name in settings.INSTALLED_APPS:
+ try:
+ import_module('.management', app_name)
+ except ImportError:
+ pass
+
+ sql_list = sql_flush(self.style, connection, only_django=True,
+ reset_sequences=reset_sequences,
+ allow_cascade=allow_cascade)
+
+ if interactive:
+ confirm = input("""You have requested a flush of the database.
+This will IRREVERSIBLY DESTROY all data currently in the %r database,
+and return each table to the state it was in after syncdb.
+Are you sure you want to do this?
+
+ Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
+ else:
+ confirm = 'yes'
+
+ if confirm == 'yes':
+ try:
+ with transaction.commit_on_success_unless_managed():
+ cursor = connection.cursor()
+ for sql in sql_list:
+ cursor.execute(sql)
+ except Exception as e:
+ new_msg = (
+ "Database %s couldn't be flushed. Possible reasons:\n"
+ " * The database isn't running or isn't configured correctly.\n"
+ " * At least one of the expected database tables doesn't exist.\n"
+ " * The SQL was invalid.\n"
+ "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
+ "The full error: %s") % (connection.settings_dict['NAME'], e)
+ six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])
+
+ if not inhibit_post_syncdb:
+ self.emit_post_syncdb(verbosity, interactive, db)
+
+ # Reinstall the initial_data fixture.
+ if options.get('load_initial_data'):
+ # Reinstall the initial_data fixture.
+ call_command('loaddata', 'initial_data', **options)
+
+ else:
+ self.stdout.write("Flush cancelled.\n")
+
+ @staticmethod
+ def emit_post_syncdb(verbosity, interactive, database):
+ # Emit the post sync signal. This allows individual applications to
+ # respond as if the database had been sync'd from scratch.
+ all_models = []
+ for app in models.get_apps():
+ all_models.extend([
+ m for m in models.get_models(app, include_auto_created=True)
+ if router.allow_syncdb(database, m)
+ ])
+ emit_post_sync_signal(set(all_models), verbosity, interactive, database)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/inspectdb.py b/lib/python2.7/site-packages/django/core/management/commands/inspectdb.py
new file mode 100644
index 0000000..48f7fd9
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/inspectdb.py
@@ -0,0 +1,237 @@
+from __future__ import unicode_literals
+
+import keyword
+import re
+from optparse import make_option
+
+from django.core.management.base import NoArgsCommand, CommandError
+from django.db import connections, DEFAULT_DB_ALIAS
+from django.utils.datastructures import SortedDict
+
+
+class Command(NoArgsCommand):
+ help = "Introspects the database tables in the given database and outputs a Django model module."
+
+ option_list = NoArgsCommand.option_list + (
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database to '
+ 'introspect. Defaults to using the "default" database.'),
+ )
+
+ requires_model_validation = False
+
+ db_module = 'django.db'
+
+ def handle_noargs(self, **options):
+ try:
+ for line in self.handle_inspection(options):
+ self.stdout.write("%s\n" % line)
+ except NotImplementedError:
+ raise CommandError("Database inspection isn't supported for the currently selected database backend.")
+
+ def handle_inspection(self, options):
+ connection = connections[options.get('database')]
+ # 'table_name_filter' is a stealth option
+ table_name_filter = options.get('table_name_filter')
+
+ table2model = lambda table_name: table_name.title().replace('_', '').replace(' ', '').replace('-', '')
+ strip_prefix = lambda s: s[1:] if s.startswith("u'") else s
+
+ cursor = connection.cursor()
+ yield "# This is an auto-generated Django model module."
+ yield "# You'll have to do the following manually to clean this up:"
+ yield "# * Rearrange models' order"
+ yield "# * Make sure each model has one field with primary_key=True"
+ yield "# * Remove `managed = False` lines if you wish to allow Django to create and delete the table"
+ yield "# Feel free to rename the models, but don't rename db_table values or field names."
+ yield "#"
+ yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
+ yield "# into your database."
+ yield "from __future__ import unicode_literals"
+ yield ''
+ yield 'from %s import models' % self.db_module
+ yield ''
+ known_models = []
+ for table_name in connection.introspection.table_names(cursor):
+ if table_name_filter is not None and callable(table_name_filter):
+ if not table_name_filter(table_name):
+ continue
+ yield 'class %s(models.Model):' % table2model(table_name)
+ known_models.append(table2model(table_name))
+ try:
+ relations = connection.introspection.get_relations(cursor, table_name)
+ except NotImplementedError:
+ relations = {}
+ try:
+ indexes = connection.introspection.get_indexes(cursor, table_name)
+ except NotImplementedError:
+ indexes = {}
+ used_column_names = [] # Holds column names used in the table so far
+ for i, row in enumerate(connection.introspection.get_table_description(cursor, table_name)):
+ comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
+ extra_params = SortedDict() # Holds Field parameters such as 'db_column'.
+ column_name = row[0]
+ is_relation = i in relations
+
+ att_name, params, notes = self.normalize_col_name(
+ column_name, used_column_names, is_relation)
+ extra_params.update(params)
+ comment_notes.extend(notes)
+
+ used_column_names.append(att_name)
+
+ # Add primary_key and unique, if necessary.
+ if column_name in indexes:
+ if indexes[column_name]['primary_key']:
+ extra_params['primary_key'] = True
+ elif indexes[column_name]['unique']:
+ extra_params['unique'] = True
+
+ if is_relation:
+ rel_to = "self" if relations[i][1] == table_name else table2model(relations[i][1])
+ if rel_to in known_models:
+ field_type = 'ForeignKey(%s' % rel_to
+ else:
+ field_type = "ForeignKey('%s'" % rel_to
+ else:
+ # Calling `get_field_type` to get the field type string and any
+ # additional parameters and notes.
+ field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
+ extra_params.update(field_params)
+ comment_notes.extend(field_notes)
+
+ field_type += '('
+
+ # Don't output 'id = meta.AutoField(primary_key=True)', because
+ # that's assumed if it doesn't exist.
+ if att_name == 'id' and field_type == 'AutoField(' and extra_params == {'primary_key': True}:
+ continue
+
+ # Add 'null' and 'blank', if the 'null_ok' flag was present in the
+ # table description.
+ if row[6]: # If it's NULL...
+ if field_type == 'BooleanField(':
+ field_type = 'NullBooleanField('
+ else:
+ extra_params['blank'] = True
+ if not field_type in ('TextField(', 'CharField('):
+ extra_params['null'] = True
+
+ field_desc = '%s = models.%s' % (att_name, field_type)
+ if extra_params:
+ if not field_desc.endswith('('):
+ field_desc += ', '
+ field_desc += ', '.join([
+ '%s=%s' % (k, strip_prefix(repr(v)))
+ for k, v in extra_params.items()])
+ field_desc += ')'
+ if comment_notes:
+ field_desc += ' # ' + ' '.join(comment_notes)
+ yield ' %s' % field_desc
+ for meta_line in self.get_meta(table_name):
+ yield meta_line
+
+ def normalize_col_name(self, col_name, used_column_names, is_relation):
+ """
+ Modify the column name to make it Python-compatible as a field name
+ """
+ field_params = {}
+ field_notes = []
+
+ new_name = col_name.lower()
+ if new_name != col_name:
+ field_notes.append('Field name made lowercase.')
+
+ if is_relation:
+ if new_name.endswith('_id'):
+ new_name = new_name[:-3]
+ else:
+ field_params['db_column'] = col_name
+
+ new_name, num_repl = re.subn(r'\W', '_', new_name)
+ if num_repl > 0:
+ field_notes.append('Field renamed to remove unsuitable characters.')
+
+ if new_name.find('__') >= 0:
+ while new_name.find('__') >= 0:
+ new_name = new_name.replace('__', '_')
+ if col_name.lower().find('__') >= 0:
+ # Only add the comment if the double underscore was in the original name
+ field_notes.append("Field renamed because it contained more than one '_' in a row.")
+
+ if new_name.startswith('_'):
+ new_name = 'field%s' % new_name
+ field_notes.append("Field renamed because it started with '_'.")
+
+ if new_name.endswith('_'):
+ new_name = '%sfield' % new_name
+ field_notes.append("Field renamed because it ended with '_'.")
+
+ if keyword.iskeyword(new_name):
+ new_name += '_field'
+ field_notes.append('Field renamed because it was a Python reserved word.')
+
+ if new_name[0].isdigit():
+ new_name = 'number_%s' % new_name
+ field_notes.append("Field renamed because it wasn't a valid Python identifier.")
+
+ if new_name in used_column_names:
+ num = 0
+ while '%s_%d' % (new_name, num) in used_column_names:
+ num += 1
+ new_name = '%s_%d' % (new_name, num)
+ field_notes.append('Field renamed because of name conflict.')
+
+ if col_name != new_name and field_notes:
+ field_params['db_column'] = col_name
+
+ return new_name, field_params, field_notes
+
+ def get_field_type(self, connection, table_name, row):
+ """
+ Given the database connection, the table name, and the cursor row
+ description, this routine will return the given field type name, as
+ well as any additional keyword parameters and notes for the field.
+ """
+ field_params = SortedDict()
+ field_notes = []
+
+ try:
+ field_type = connection.introspection.get_field_type(row[1], row)
+ except KeyError:
+ field_type = 'TextField'
+ field_notes.append('This field type is a guess.')
+
+ # This is a hook for DATA_TYPES_REVERSE to return a tuple of
+ # (field_type, field_params_dict).
+ if type(field_type) is tuple:
+ field_type, new_params = field_type
+ field_params.update(new_params)
+
+ # Add max_length for all CharFields.
+ if field_type == 'CharField' and row[3]:
+ field_params['max_length'] = int(row[3])
+
+ if field_type == 'DecimalField':
+ if row[4] is None or row[5] is None:
+ field_notes.append(
+ 'max_digits and decimal_places have been guessed, as this '
+ 'database handles decimal fields as float')
+ field_params['max_digits'] = row[4] if row[4] is not None else 10
+ field_params['decimal_places'] = row[5] if row[5] is not None else 5
+ else:
+ field_params['max_digits'] = row[4]
+ field_params['decimal_places'] = row[5]
+
+ return field_type, field_params, field_notes
+
+ def get_meta(self, table_name):
+ """
+ Return a sequence comprising the lines of code necessary
+ to construct the inner Meta class for the model corresponding
+ to the given database table name.
+ """
+ return [" class Meta:",
+ " managed = False",
+ " db_table = '%s'" % table_name,
+ ""]
diff --git a/lib/python2.7/site-packages/django/core/management/commands/loaddata.py b/lib/python2.7/site-packages/django/core/management/commands/loaddata.py
new file mode 100644
index 0000000..cbadfa9
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/loaddata.py
@@ -0,0 +1,282 @@
+from __future__ import unicode_literals
+
+import glob
+import gzip
+import os
+import zipfile
+from optparse import make_option
+import warnings
+
+from django.conf import settings
+from django.core import serializers
+from django.core.management.base import BaseCommand, CommandError
+from django.core.management.color import no_style
+from django.db import (connections, router, transaction, DEFAULT_DB_ALIAS,
+ IntegrityError, DatabaseError)
+from django.db.models import get_app_paths
+from django.utils.encoding import force_text
+from django.utils.functional import cached_property, memoize
+from django.utils._os import upath
+from itertools import product
+
+try:
+ import bz2
+ has_bz2 = True
+except ImportError:
+ has_bz2 = False
+
+
+class Command(BaseCommand):
+    # Loads the contents of one or more serialized fixtures (optionally
+    # gz/zip/bz2-compressed) into the selected database inside a single
+    # transaction, then verifies constraints and resets sequences.
+    help = 'Installs the named fixture(s) in the database.'
+    args = "fixture [fixture ...]"
+
+    option_list = BaseCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
+            'fixtures into. Defaults to the "default" database.'),
+        make_option('--ignorenonexistent', '-i', action='store_true', dest='ignore',
+            default=False, help='Ignores entries in the serialized data for fields'
+            ' that do not currently exist on the model.'),
+    )
+
+    def handle(self, *fixture_labels, **options):
+        # Entry point: validate arguments, then run loaddata() inside a
+        # transaction (unless the caller manages transactions itself).
+
+        self.ignore = options.get('ignore')
+        self.using = options.get('database')
+
+        if not len(fixture_labels):
+            raise CommandError(
+                "No database fixture specified. Please provide the path "
+                "of at least one fixture in the command line.")
+
+        self.verbosity = int(options.get('verbosity'))
+
+        with transaction.commit_on_success_unless_managed(using=self.using):
+            self.loaddata(fixture_labels)
+
+        # Close the DB connection -- unless we're still in a transaction. This
+        # is required as a workaround for an edge case in MySQL: if the same
+        # connection is used to create tables, load data, and query, the query
+        # can return incorrect results. See Django #7572, MySQL #37735.
+        if transaction.get_autocommit(self.using):
+            connections[self.using].close()
+
+    def loaddata(self, fixture_labels):
+        """Load every fixture label, then check constraints and sequences."""
+        connection = connections[self.using]
+
+        # Keep a count of the installed objects and fixtures
+        self.fixture_count = 0
+        self.loaded_object_count = 0
+        self.fixture_object_count = 0
+        self.models = set()
+
+        self.serialization_formats = serializers.get_public_serializer_formats()
+        # Map a compression suffix to the callable used to open the file;
+        # None covers uncompressed fixtures.
+        self.compression_formats = {
+            None: open,
+            'gz': gzip.GzipFile,
+            'zip': SingleZipReader
+        }
+        if has_bz2:
+            self.compression_formats['bz2'] = bz2.BZ2File
+
+        # Constraint checks are disabled so fixtures may contain forward
+        # references; they are validated manually below.
+        with connection.constraint_checks_disabled():
+            for fixture_label in fixture_labels:
+                self.load_label(fixture_label)
+
+        # Since we disabled constraint checks, we must manually check for
+        # any invalid keys that might have been added
+        table_names = [model._meta.db_table for model in self.models]
+        try:
+            connection.check_constraints(table_names=table_names)
+        except Exception as e:
+            e.args = ("Problem installing fixtures: %s" % e,)
+            raise
+
+        # If we found even one object in a fixture, we need to reset the
+        # database sequences.
+        if self.loaded_object_count > 0:
+            sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models)
+            if sequence_sql:
+                if self.verbosity >= 2:
+                    self.stdout.write("Resetting sequences\n")
+                cursor = connection.cursor()
+                for line in sequence_sql:
+                    cursor.execute(line)
+                cursor.close()
+
+        if self.verbosity >= 1:
+            if self.fixture_object_count == self.loaded_object_count:
+                self.stdout.write("Installed %d object(s) from %d fixture(s)" %
+                    (self.loaded_object_count, self.fixture_count))
+            else:
+                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" %
+                    (self.loaded_object_count, self.fixture_object_count, self.fixture_count))
+
+    def load_label(self, fixture_label):
+        """
+        Loads fixtures files for a given label.
+        """
+        for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
+            _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
+            # Open with the callable matching the detected compression format.
+            open_method = self.compression_formats[cmp_fmt]
+            fixture = open_method(fixture_file, 'r')
+            try:
+                self.fixture_count += 1
+                objects_in_fixture = 0
+                loaded_objects_in_fixture = 0
+                if self.verbosity >= 2:
+                    self.stdout.write("Installing %s fixture '%s' from %s." %
+                        (ser_fmt, fixture_name, humanize(fixture_dir)))
+
+                objects = serializers.deserialize(ser_fmt, fixture,
+                    using=self.using, ignorenonexistent=self.ignore)
+
+                for obj in objects:
+                    objects_in_fixture += 1
+                    # Only save objects the router allows on this database.
+                    if router.allow_syncdb(self.using, obj.object.__class__):
+                        loaded_objects_in_fixture += 1
+                        self.models.add(obj.object.__class__)
+                        try:
+                            obj.save(using=self.using)
+                        except (DatabaseError, IntegrityError) as e:
+                            # Annotate the exception with the failing object
+                            # so the user can locate it in the fixture.
+                            e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
+                                'app_label': obj.object._meta.app_label,
+                                'object_name': obj.object._meta.object_name,
+                                'pk': obj.object.pk,
+                                'error_msg': force_text(e)
+                            },)
+                            raise
+
+                self.loaded_object_count += loaded_objects_in_fixture
+                self.fixture_object_count += objects_in_fixture
+            except Exception as e:
+                if not isinstance(e, CommandError):
+                    e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
+                raise
+            finally:
+                fixture.close()
+
+            # If the fixture we loaded contains 0 objects, assume that an
+            # error was encountered during fixture loading.
+            if objects_in_fixture == 0:
+                raise CommandError(
+                    "No fixture data found for '%s'. "
+                    "(File format may be invalid.)" % fixture_name)
+
+    def _find_fixtures(self, fixture_label):
+        """
+        Finds fixture files for a given label.
+        """
+        fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
+        # Candidate suffix components: database-specific or generic (None),
+        # crossed with every serialization and compression format that the
+        # label did not pin down explicitly.
+        databases = [self.using, None]
+        cmp_fmts = list(self.compression_formats.keys()) if cmp_fmt is None else [cmp_fmt]
+        ser_fmts = serializers.get_public_serializer_formats() if ser_fmt is None else [ser_fmt]
+
+        if self.verbosity >= 2:
+            self.stdout.write("Loading '%s' fixtures..." % fixture_name)
+
+        if os.path.isabs(fixture_name):
+            fixture_dirs = [os.path.dirname(fixture_name)]
+            fixture_name = os.path.basename(fixture_name)
+        else:
+            fixture_dirs = self.fixture_dirs
+            # A relative path inside the label narrows each fixture dir.
+            if os.path.sep in fixture_name:
+                fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
+                                for dir_ in fixture_dirs]
+                fixture_name = os.path.basename(fixture_name)
+
+        # Every acceptable on-disk filename for this label.
+        suffixes = ('.'.join(ext for ext in combo if ext)
+                for combo in product(databases, ser_fmts, cmp_fmts))
+        targets = set('.'.join((fixture_name, suffix)) for suffix in suffixes)
+
+        fixture_files = []
+        for fixture_dir in fixture_dirs:
+            if self.verbosity >= 2:
+                self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
+            fixture_files_in_dir = []
+            for candidate in glob.iglob(os.path.join(fixture_dir, fixture_name + '*')):
+                if os.path.basename(candidate) in targets:
+                    # Save the fixture_dir and fixture_name for future error messages.
+                    fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
+
+            if self.verbosity >= 2 and not fixture_files_in_dir:
+                self.stdout.write("No fixture '%s' in %s." %
+                                  (fixture_name, humanize(fixture_dir)))
+
+            # Check kept for backwards-compatibility; it isn't clear why
+            # duplicates are only allowed in different directories.
+            if len(fixture_files_in_dir) > 1:
+                raise CommandError(
+                    "Multiple fixtures named '%s' in %s. Aborting." %
+                    (fixture_name, humanize(fixture_dir)))
+            fixture_files.extend(fixture_files_in_dir)
+
+        if fixture_name != 'initial_data' and not fixture_files:
+            # Warning kept for backwards-compatibility; why not an exception?
+            warnings.warn("No fixture named '%s' found." % fixture_name)
+
+        return fixture_files
+
+    # Cache fixture lookups with the (deprecated) memoize helper, keyed on
+    # the first two positional args, i.e. (self, fixture_label).
+    _label_to_fixtures_cache = {}
+    find_fixtures = memoize(_find_fixtures, _label_to_fixtures_cache, 2)
+
+    @cached_property
+    def fixture_dirs(self):
+        """
+        Return a list of fixture directories.
+
+        The list contains the 'fixtures' subdirectory of each installed
+        application, if it exists, the directories in FIXTURE_DIRS, and the
+        current directory.
+        """
+        dirs = []
+        for path in get_app_paths():
+            d = os.path.join(os.path.dirname(path), 'fixtures')
+            if os.path.isdir(d):
+                dirs.append(d)
+        dirs.extend(list(settings.FIXTURE_DIRS))
+        # '' makes the current working directory a fixture location too.
+        dirs.append('')
+        dirs = [upath(os.path.abspath(os.path.realpath(d))) for d in dirs]
+        return dirs
+
+    def parse_name(self, fixture_name):
+        """
+        Splits fixture name in name, serialization format, compression format.
+        """
+        # At most two dotted suffixes matter: <name>.<ser_fmt>.<cmp_fmt>.
+        parts = fixture_name.rsplit('.', 2)
+
+        if len(parts) > 1 and parts[-1] in self.compression_formats:
+            cmp_fmt = parts[-1]
+            parts = parts[:-1]
+        else:
+            cmp_fmt = None
+
+        if len(parts) > 1:
+            if parts[-1] in self.serialization_formats:
+                ser_fmt = parts[-1]
+                parts = parts[:-1]
+            else:
+                raise CommandError(
+                    "Problem installing fixture '%s': %s is not a known "
+                    "serialization format." % (''.join(parts[:-1]), parts[-1]))
+        else:
+            ser_fmt = None
+
+        name = '.'.join(parts)
+
+        return name, ser_fmt, cmp_fmt
+
+
class SingleZipReader(zipfile.ZipFile):
    """ZIP reader restricted to single-member archives.

    Used to open '.zip' fixtures: the archive must hold exactly one file,
    whose contents read() returns.
    """

    def __init__(self, *args, **kwargs):
        zipfile.ZipFile.__init__(self, *args, **kwargs)
        members = self.namelist()
        if len(members) != 1:
            raise ValueError("Zip-compressed fixtures must contain one file.")

    def read(self):
        # Exactly one member is guaranteed by __init__.
        (only_member,) = self.namelist()
        return zipfile.ZipFile.read(self, only_member)
+
+
def humanize(dirname):
    """Return *dirname* quoted for user messages, or 'absolute path' when empty."""
    if dirname:
        return "'%s'" % dirname
    return 'absolute path'
diff --git a/lib/python2.7/site-packages/django/core/management/commands/makemessages.py b/lib/python2.7/site-packages/django/core/management/commands/makemessages.py
new file mode 100644
index 0000000..5b09661
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/makemessages.py
@@ -0,0 +1,422 @@
+import fnmatch
+import glob
+import io
+import os
+import re
+import sys
+from itertools import dropwhile
+from optparse import make_option
+
+import django
+from django.core.management.base import CommandError, NoArgsCommand
+from django.core.management.utils import (handle_extensions, find_command,
+ popen_wrapper)
+from django.utils.encoding import force_str
+from django.utils.functional import total_ordering
+from django.utils.text import get_text_list
+from django.utils.jslex import prepare_js_for_gettext
+
+plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
+STATUS_OK = 0
+
+
def check_programs(*programs):
    """Ensure each named gettext binary is installed; raise CommandError if not.

    Raises for the first missing program, in argument order.
    """
    missing = [program for program in programs if find_command(program) is None]
    if missing:
        raise CommandError("Can't find %s. Make sure you have GNU "
                "gettext tools 0.15 or newer installed." % missing[0])
+
+
+@total_ordering
+class TranslatableFile(object):
+    # A single source file from which translatable strings are extracted.
+    # Ordered by (dirpath, file) so file lists can be sorted deterministically.
+    def __init__(self, dirpath, file_name):
+        self.file = file_name
+        self.dirpath = dirpath
+
+    def __repr__(self):
+        return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
+
+    def __eq__(self, other):
+        return self.dirpath == other.dirpath and self.file == other.file
+
+    def __lt__(self, other):
+        # total_ordering fills in the remaining comparisons from __eq__/__lt__.
+        if self.dirpath == other.dirpath:
+            return self.file < other.file
+        return self.dirpath < other.dirpath
+
+    def process(self, command, potfile, domain, keep_pot=False):
+        """
+        Extract translatable literals from self.file for :param domain:
+        creating or updating the :param potfile: POT file.
+
+        Uses the xgettext GNU gettext utility.
+        """
+
+        from django.utils.translation import templatize
+
+        if command.verbosity > 1:
+            command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
+        _, file_ext = os.path.splitext(self.file)
+        if domain == 'djangojs' and file_ext in command.extensions:
+            # JavaScript: rewrite the source into a C-like temp file
+            # ('<name>.c') so xgettext's C lexer can handle it.
+            is_templatized = True
+            orig_file = os.path.join(self.dirpath, self.file)
+            with open(orig_file) as fp:
+                src_data = fp.read()
+            src_data = prepare_js_for_gettext(src_data)
+            thefile = '%s.c' % self.file
+            work_file = os.path.join(self.dirpath, thefile)
+            with open(work_file, "w") as fp:
+                fp.write(src_data)
+            args = [
+                'xgettext',
+                '-d', domain,
+                '--language=C',
+                '--keyword=gettext_noop',
+                '--keyword=gettext_lazy',
+                '--keyword=ngettext_lazy:1,2',
+                '--keyword=pgettext:1c,2',
+                '--keyword=npgettext:1c,2,3',
+                '--from-code=UTF-8',
+                '--add-comments=Translators',
+                '--output=-'
+            ]
+            if command.wrap:
+                args.append(command.wrap)
+            if command.location:
+                args.append(command.location)
+            args.append(work_file)
+        elif domain == 'django' and (file_ext == '.py' or file_ext in command.extensions):
+            # Python source is passed straight through; template files are
+            # first converted to Python ('<name>.py') via templatize().
+            thefile = self.file
+            orig_file = os.path.join(self.dirpath, self.file)
+            is_templatized = file_ext in command.extensions
+            if is_templatized:
+                with open(orig_file, "rU") as fp:
+                    src_data = fp.read()
+                thefile = '%s.py' % self.file
+                content = templatize(src_data, orig_file[2:])
+                with open(os.path.join(self.dirpath, thefile), "w") as fp:
+                    fp.write(content)
+            work_file = os.path.join(self.dirpath, thefile)
+            args = [
+                'xgettext',
+                '-d', domain,
+                '--language=Python',
+                '--keyword=gettext_noop',
+                '--keyword=gettext_lazy',
+                '--keyword=ngettext_lazy:1,2',
+                '--keyword=ugettext_noop',
+                '--keyword=ugettext_lazy',
+                '--keyword=ungettext_lazy:1,2',
+                '--keyword=pgettext:1c,2',
+                '--keyword=npgettext:1c,2,3',
+                '--keyword=pgettext_lazy:1c,2',
+                '--keyword=npgettext_lazy:1c,2,3',
+                '--from-code=UTF-8',
+                '--add-comments=Translators',
+                '--output=-'
+            ]
+            if command.wrap:
+                args.append(command.wrap)
+            if command.location:
+                args.append(command.location)
+            args.append(work_file)
+        else:
+            # Not a file this domain extracts from: nothing to do.
+            return
+        msgs, errors, status = popen_wrapper(args)
+        if errors:
+            if status != STATUS_OK:
+                # Clean up temp work file and (optionally) the POT before
+                # surfacing the xgettext failure.
+                if is_templatized:
+                    os.unlink(work_file)
+                if not keep_pot and os.path.exists(potfile):
+                    os.unlink(potfile)
+                raise CommandError(
+                    "errors happened while running xgettext on %s\n%s" %
+                    (self.file, errors))
+            elif command.verbosity > 0:
+                # Print warnings
+                command.stdout.write(errors)
+        if msgs:
+            if is_templatized:
+                # Remove '.py' suffix
+                if os.name =='nt':
+                    # Preserve '.\' prefix on Windows to respect gettext behavior
+                    old = '#: ' + work_file
+                    new = '#: ' + orig_file
+                else:
+                    old = '#: ' + work_file[2:]
+                    new = '#: ' + orig_file[2:]
+                # Point '#:' location comments at the original file, not the
+                # temporary templatized copy.
+                msgs = msgs.replace(old, new)
+            write_pot_file(potfile, msgs)
+        if is_templatized:
+            os.unlink(work_file)
+
def write_pot_file(potfile, msgs):
    """
    Write the :param potfile: POT file with the :param msgs: contents,
    previously making sure its format is valid.

    If the POT file already exists, the leading header block (the initial
    run of non-empty lines) is stripped from msgs before appending;
    otherwise the placeholder charset is fixed up to UTF-8.
    """
    if not os.path.exists(potfile):
        msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
    else:
        # Strip the header: drop leading non-empty lines.
        remainder = dropwhile(len, msgs.split('\n'))
        msgs = '\n'.join(remainder)
    with open(potfile, 'a') as fp:
        fp.write(msgs)
+
+
+class Command(NoArgsCommand):
+    # makemessages: scans the tree for translatable strings and writes or
+    # updates .po catalogs via the GNU gettext toolchain.
+    option_list = NoArgsCommand.option_list + (
+        make_option('--locale', '-l', default=None, dest='locale', action='append',
+            help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
+                 'Can be used multiple times.'),
+        make_option('--domain', '-d', default='django', dest='domain',
+            help='The domain of the message files (default: "django").'),
+        make_option('--all', '-a', action='store_true', dest='all',
+            default=False, help='Updates the message files for all existing locales.'),
+        make_option('--extension', '-e', dest='extensions',
+            help='The file extension(s) to examine (default: "html,txt", or "js" if the domain is "djangojs"). Separate multiple extensions with commas, or use -e multiple times.',
+            action='append'),
+        make_option('--symlinks', '-s', action='store_true', dest='symlinks',
+            default=False, help='Follows symlinks to directories when examining source code and templates for translation strings.'),
+        make_option('--ignore', '-i', action='append', dest='ignore_patterns',
+            default=[], metavar='PATTERN', help='Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.'),
+        make_option('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
+            default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'."),
+        make_option('--no-wrap', action='store_true', dest='no_wrap',
+            default=False, help="Don't break long message lines into several lines."),
+        make_option('--no-location', action='store_true', dest='no_location',
+            default=False, help="Don't write '#: filename:line' lines."),
+        make_option('--no-obsolete', action='store_true', dest='no_obsolete',
+            default=False, help="Remove obsolete message strings."),
+        make_option('--keep-pot', action='store_true', dest='keep_pot',
+            default=False, help="Keep .pot file after making messages. Useful when debugging."),
+    )
+    help = ("Runs over the entire source tree of the current directory and "
+"pulls out all strings marked for translation. It creates (or updates) a message "
+"file in the conf/locale (in the django tree) or locale (for projects and "
+"applications) directory.\n\nYou must run this command with one of either the "
+"--locale or --all options.")
+
+    requires_model_validation = False
+    leave_locale_alone = True
+
+    def handle_noargs(self, *args, **options):
+        # Parse options, locate the locale directory, build one POT file,
+        # then derive a PO file per selected locale.
+        locale = options.get('locale')
+        self.domain = options.get('domain')
+        self.verbosity = int(options.get('verbosity'))
+        process_all = options.get('all')
+        extensions = options.get('extensions')
+        self.symlinks = options.get('symlinks')
+        ignore_patterns = options.get('ignore_patterns')
+        if options.get('use_default_ignore_patterns'):
+            ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
+        self.ignore_patterns = list(set(ignore_patterns))
+        # Stored as the literal flag strings passed through to the gettext
+        # tools ('' means "use the tool's default").
+        self.wrap = '--no-wrap' if options.get('no_wrap') else ''
+        self.location = '--no-location' if options.get('no_location') else ''
+        self.no_obsolete = options.get('no_obsolete')
+        self.keep_pot = options.get('keep_pot')
+
+        if self.domain not in ('django', 'djangojs'):
+            raise CommandError("currently makemessages only supports domains "
+                               "'django' and 'djangojs'")
+        if self.domain == 'djangojs':
+            exts = extensions if extensions else ['js']
+        else:
+            exts = extensions if extensions else ['html', 'txt']
+        self.extensions = handle_extensions(exts)
+
+        if (locale is None and not process_all) or self.domain is None:
+            raise CommandError("Type '%s help %s' for usage information." % (
+                os.path.basename(sys.argv[0]), sys.argv[1]))
+
+        if self.verbosity > 1:
+            self.stdout.write('examining files with the extensions: %s\n'
+                             % get_text_list(list(self.extensions), 'and'))
+
+        # Need to ensure that the i18n framework is enabled
+        from django.conf import settings
+        if settings.configured:
+            settings.USE_I18N = True
+        else:
+            settings.configure(USE_I18N = True)
+
+        # Determine the locale directory: 'conf/locale' inside the Django
+        # tree itself, or 'locale' for a project/application.
+        self.invoked_for_django = False
+        if os.path.isdir(os.path.join('conf', 'locale')):
+            localedir = os.path.abspath(os.path.join('conf', 'locale'))
+            self.invoked_for_django = True
+            # Ignoring all contrib apps
+            self.ignore_patterns += ['contrib/*']
+        elif os.path.isdir('locale'):
+            localedir = os.path.abspath('locale')
+        else:
+            raise CommandError("This script should be run from the Django Git "
+                "tree or your project or app tree. If you did indeed run it "
+                "from the Git checkout or your project or application, "
+                "maybe you are just missing the conf/locale (in the django "
+                "tree) or locale (for project and application) directory? It "
+                "is not created automatically, you have to create it by hand "
+                "if you want to enable i18n for your project or application.")
+
+        check_programs('xgettext')
+
+        potfile = self.build_pot_file(localedir)
+
+        # Build po files for each selected locale
+        locales = []
+        if locale is not None:
+            locales = locale
+        elif process_all:
+            locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
+            locales = [os.path.basename(l) for l in locale_dirs]
+
+        if locales:
+            check_programs('msguniq', 'msgmerge', 'msgattrib')
+
+        try:
+            for locale in locales:
+                if self.verbosity > 0:
+                    self.stdout.write("processing locale %s\n" % locale)
+                self.write_po_file(potfile, locale)
+        finally:
+            # The POT is an intermediate artifact unless --keep-pot was given.
+            if not self.keep_pot and os.path.exists(potfile):
+                os.unlink(potfile)
+
+    def build_pot_file(self, localedir):
+        """Extract strings from every found file into <domain>.pot; return its path."""
+        file_list = self.find_files(".")
+
+        potfile = os.path.join(localedir, '%s.pot' % str(self.domain))
+        if os.path.exists(potfile):
+            # Remove a previous undeleted potfile, if any
+            os.unlink(potfile)
+
+        for f in file_list:
+            try:
+                f.process(self, potfile, self.domain, self.keep_pot)
+            except UnicodeDecodeError:
+                self.stdout.write("UnicodeDecodeError: skipped file %s in %s" % (f.file, f.dirpath))
+        return potfile
+
+    def find_files(self, root):
+        """
+        Helper method to get all files in the given root.
+        """
+
+        def is_ignored(path, ignore_patterns):
+            """
+            Check if the given path should be ignored or not.
+            """
+            filename = os.path.basename(path)
+            ignore = lambda pattern: fnmatch.fnmatchcase(filename, pattern)
+            return any(ignore(pattern) for pattern in ignore_patterns)
+
+        # Patterns ending in '<sep>*' match directories themselves, so
+        # strip that suffix when testing directory names below.
+        dir_suffix = '%s*' % os.sep
+        norm_patterns = [p[:-len(dir_suffix)] if p.endswith(dir_suffix) else p for p in self.ignore_patterns]
+        all_files = []
+        for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
+            # Prune ignored directories in place so os.walk skips them.
+            for dirname in dirnames[:]:
+                if is_ignored(os.path.normpath(os.path.join(dirpath, dirname)), norm_patterns):
+                    dirnames.remove(dirname)
+                    if self.verbosity > 1:
+                        self.stdout.write('ignoring directory %s\n' % dirname)
+            for filename in filenames:
+                if is_ignored(os.path.normpath(os.path.join(dirpath, filename)), self.ignore_patterns):
+                    if self.verbosity > 1:
+                        self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
+                else:
+                    all_files.append(TranslatableFile(dirpath, filename))
+        return sorted(all_files)
+
+    def write_po_file(self, potfile, locale):
+        """
+        Creates or updates the PO file for self.domain and :param locale:.
+        Uses contents of the existing :param potfile:.
+
+        Uses mguniq, msgmerge, and msgattrib GNU gettext utilities.
+        """
+        # 1. De-duplicate the extracted messages with msguniq.
+        args = ['msguniq', '--to-code=utf-8']
+        if self.wrap:
+            args.append(self.wrap)
+        if self.location:
+            args.append(self.location)
+        args.append(potfile)
+        msgs, errors, status = popen_wrapper(args)
+        if errors:
+            if status != STATUS_OK:
+                raise CommandError(
+                    "errors happened while running msguniq\n%s" % errors)
+            elif self.verbosity > 0:
+                self.stdout.write(errors)
+
+        basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
+        if not os.path.isdir(basedir):
+            os.makedirs(basedir)
+        pofile = os.path.join(basedir, '%s.po' % str(self.domain))
+
+        if os.path.exists(pofile):
+            # 2a. Existing catalog: merge the new template into it.
+            with open(potfile, 'w') as fp:
+                fp.write(msgs)
+            args = ['msgmerge', '-q']
+            if self.wrap:
+                args.append(self.wrap)
+            if self.location:
+                args.append(self.location)
+            args.extend([pofile, potfile])
+            msgs, errors, status = popen_wrapper(args)
+            if errors:
+                if status != STATUS_OK:
+                    raise CommandError(
+                        "errors happened while running msgmerge\n%s" % errors)
+                elif self.verbosity > 0:
+                    self.stdout.write(errors)
+        elif not self.invoked_for_django:
+            # 2b. Fresh catalog outside the Django tree: seed the
+            # Plural-Forms header from Django's own catalog.
+            msgs = self.copy_plural_forms(msgs, locale)
+        msgs = msgs.replace(
+            "#. #-#-#-#-#  %s.pot (PACKAGE VERSION)  #-#-#-#-#\n" % self.domain, "")
+        with open(pofile, 'w') as fp:
+            fp.write(msgs)
+
+        # 3. Optionally drop obsolete entries with msgattrib.
+        if self.no_obsolete:
+            args = ['msgattrib', '-o', pofile, '--no-obsolete']
+            if self.wrap:
+                args.append(self.wrap)
+            if self.location:
+                args.append(self.location)
+            args.append(pofile)
+            msgs, errors, status = popen_wrapper(args)
+            if errors:
+                if status != STATUS_OK:
+                    raise CommandError(
+                        "errors happened while running msgattrib\n%s" % errors)
+                elif self.verbosity > 0:
+                    self.stdout.write(errors)
+
+    def copy_plural_forms(self, msgs, locale):
+        """
+        Copies plural forms header contents from a Django catalog of locale to
+        the msgs string, inserting it at the right place. msgs should be the
+        contents of a newly created .po file.
+        """
+        django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
+        if self.domain == 'djangojs':
+            domains = ('djangojs', 'django')
+        else:
+            domains = ('django',)
+        for domain in domains:
+            django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
+            if os.path.exists(django_po):
+                with io.open(django_po, 'rU', encoding='utf-8') as fp:
+                    m = plural_forms_re.search(fp.read())
+                if m:
+                    plural_form_line = force_str(m.group('value'))
+                    if self.verbosity > 1:
+                        self.stdout.write("copying plural forms: %s\n" % plural_form_line)
+                    lines = []
+                    found = False
+                    # Replace the first blank or existing Plural-Forms line
+                    # with the copied header value.
+                    for line in msgs.split('\n'):
+                        if not found and (not line or plural_forms_re.search(line)):
+                            line = '%s\n' % plural_form_line
+                            found = True
+                        lines.append(line)
+                    msgs = '\n'.join(lines)
+                    break
+        return msgs
diff --git a/lib/python2.7/site-packages/django/core/management/commands/runfcgi.py b/lib/python2.7/site-packages/django/core/management/commands/runfcgi.py
new file mode 100644
index 0000000..a60d4eb
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/runfcgi.py
@@ -0,0 +1,20 @@
+from django.core.management.base import BaseCommand
+
+class Command(BaseCommand):
+    # Thin wrapper around django.core.servers.fastcgi.runfastcgi; all
+    # option parsing is delegated to that module.
+    help = "Runs this project as a FastCGI application. Requires flup."
+    args = '[various KEY=val options, use `runfcgi help` for help]'
+
+    def handle(self, *args, **options):
+        from django.conf import settings
+        from django.utils import translation
+        # Activate the current language, because it won't get activated later.
+        try:
+            translation.activate(settings.LANGUAGE_CODE)
+        except AttributeError:
+            # Settings without LANGUAGE_CODE: run without activating i18n.
+            pass
+        from django.core.servers.fastcgi import runfastcgi
+        runfastcgi(args)
+
+    def usage(self, subcommand):
+        # Delegate help text entirely to the fastcgi module.
+        from django.core.servers.fastcgi import FASTCGI_HELP
+        return FASTCGI_HELP
diff --git a/lib/python2.7/site-packages/django/core/management/commands/runserver.py b/lib/python2.7/site-packages/django/core/management/commands/runserver.py
new file mode 100644
index 0000000..503cff2
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/runserver.py
@@ -0,0 +1,149 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+from datetime import datetime
+import errno
+import os
+import re
+import sys
+import socket
+
+from django.core.management.base import BaseCommand, CommandError
+from django.core.servers.basehttp import run, get_internal_wsgi_application
+from django.utils import autoreload
+from django.utils import six
+
+naiveip_re = re.compile(r"""^(?:
+(?P<addr>
+ (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
+ (?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
+ (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
+):)?(?P<port>\d+)$""", re.X)
+DEFAULT_PORT = "8000"
+
+
+class Command(BaseCommand):
+    # runserver: parses an optional [addr:]port argument and serves the
+    # project's WSGI application with the development server.
+    option_list = BaseCommand.option_list + (
+        make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
+            help='Tells Django to use a IPv6 address.'),
+        make_option('--nothreading', action='store_false', dest='use_threading', default=True,
+            help='Tells Django to NOT use threading.'),
+        make_option('--noreload', action='store_false', dest='use_reloader', default=True,
+            help='Tells Django to NOT use the auto-reloader.'),
+    )
+    help = "Starts a lightweight Web server for development."
+    args = '[optional port number, or ipaddr:port]'
+
+    # Validation is called explicitly each time the server is reloaded.
+    requires_model_validation = False
+
+    def get_handler(self, *args, **options):
+        """
+        Returns the default WSGI handler for the runner.
+        """
+        return get_internal_wsgi_application()
+
+    def handle(self, addrport='', *args, **options):
+        # Parse addrport into self.addr/self.port (IPv4, bracketed IPv6,
+        # or FQDN), then delegate to run().
+        from django.conf import settings
+
+        if not settings.DEBUG and not settings.ALLOWED_HOSTS:
+            raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.')
+
+        self.use_ipv6 = options.get('use_ipv6')
+        if self.use_ipv6 and not socket.has_ipv6:
+            raise CommandError('Your Python does not support IPv6.')
+        if args:
+            raise CommandError('Usage is runserver %s' % self.args)
+        self._raw_ipv6 = False
+        if not addrport:
+            self.addr = ''
+            self.port = DEFAULT_PORT
+        else:
+            m = re.match(naiveip_re, addrport)
+            if m is None:
+                raise CommandError('"%s" is not a valid port number '
+                                   'or address:port pair.' % addrport)
+            self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
+            if not self.port.isdigit():
+                raise CommandError("%r is not a valid port number." % self.port)
+            if self.addr:
+                if _ipv6:
+                    # Strip the surrounding brackets from a literal IPv6 addr.
+                    self.addr = self.addr[1:-1]
+                    self.use_ipv6 = True
+                    self._raw_ipv6 = True
+                elif self.use_ipv6 and not _fqdn:
+                    raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
+        if not self.addr:
+            # No address given: default to the loopback interface.
+            self.addr = '::1' if self.use_ipv6 else '127.0.0.1'
+            self._raw_ipv6 = bool(self.use_ipv6)
+        self.run(*args, **options)
+
+    def run(self, *args, **options):
+        """
+        Runs the server, using the autoreloader if needed
+        """
+        use_reloader = options.get('use_reloader')
+
+        if use_reloader:
+            autoreload.main(self.inner_run, args, options)
+        else:
+            self.inner_run(*args, **options)
+
+    def inner_run(self, *args, **options):
+        # Validate models, announce startup, then serve until interrupted.
+        from django.conf import settings
+        from django.utils import translation
+
+        threading = options.get('use_threading')
+        shutdown_message = options.get('shutdown_message', '')
+        quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
+
+        self.stdout.write("Validating models...\n\n")
+        self.validate(display_num_errors=True)
+        now = datetime.now().strftime('%B %d, %Y - %X')
+        if six.PY2:
+            now = now.decode('utf-8')
+
+        self.stdout.write((
+            "%(started_at)s\n"
+            "Django version %(version)s, using settings %(settings)r\n"
+            "Starting development server at http://%(addr)s:%(port)s/\n"
+            "Quit the server with %(quit_command)s.\n"
+        ) % {
+            "started_at": now,
+            "version": self.get_version(),
+            "settings": settings.SETTINGS_MODULE,
+            "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
+            "port": self.port,
+            "quit_command": quit_command,
+        })
+        # django.core.management.base forces the locale to en-us. We should
+        # set it up correctly for the first request (particularly important
+        # in the "--noreload" case).
+        translation.activate(settings.LANGUAGE_CODE)
+
+        try:
+            handler = self.get_handler(*args, **options)
+            run(self.addr, int(self.port), handler,
+                ipv6=self.use_ipv6, threading=threading)
+        except socket.error as e:
+            # Use helpful error messages instead of ugly tracebacks.
+            ERRORS = {
+                errno.EACCES: "You don't have permission to access that port.",
+                errno.EADDRINUSE: "That port is already in use.",
+                errno.EADDRNOTAVAIL: "That IP address can't be assigned-to.",
+            }
+            try:
+                error_text = ERRORS[e.errno]
+            except KeyError:
+                error_text = str(e)
+            self.stderr.write("Error: %s" % error_text)
+            # Need to use an OS exit because sys.exit doesn't work in a thread
+            os._exit(1)
+        except KeyboardInterrupt:
+            if shutdown_message:
+                self.stdout.write(shutdown_message)
+            sys.exit(0)
+
+
+# Kept for backward compatibility
+BaseRunserverCommand = Command
diff --git a/lib/python2.7/site-packages/django/core/management/commands/shell.py b/lib/python2.7/site-packages/django/core/management/commands/shell.py
new file mode 100644
index 0000000..00a6602
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/shell.py
@@ -0,0 +1,113 @@
+import os
+from django.core.management.base import NoArgsCommand
+from optparse import make_option
+
+
+class Command(NoArgsCommand):
+    # Interactive-interpreter command. Prefers IPython, then bpython, and
+    # falls back to the stdlib `code` interpreter when neither imports.
+    shells = ['ipython', 'bpython']
+
+    option_list = NoArgsCommand.option_list + (
+        make_option('--plain', action='store_true', dest='plain',
+            help='Tells Django to use plain Python, not IPython or bpython.'),
+        make_option('--no-startup', action='store_true', dest='no_startup',
+            help='When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.'),
+        make_option('-i', '--interface', action='store', type='choice', choices=shells,
+                    dest='interface',
+                    help='Specify an interactive interpreter interface. Available options: "ipython" and "bpython"'),
+
+    )
+    help = "Runs a Python interactive interpreter. Tries to use IPython or bpython, if one of them is available."
+    # No model validation needed just to open a shell.
+    requires_model_validation = False
+
+    def _ipython_pre_011(self):
+        """Start IPython pre-0.11"""
+        from IPython.Shell import IPShell
+        shell = IPShell(argv=[])
+        shell.mainloop()
+
+    def _ipython_pre_100(self):
+        """Start IPython pre-1.0.0"""
+        from IPython.frontend.terminal.ipapp import TerminalIPythonApp
+        app = TerminalIPythonApp.instance()
+        app.initialize(argv=[])
+        app.start()
+
+    def _ipython(self):
+        """Start IPython >= 1.0"""
+        from IPython import start_ipython
+        start_ipython(argv=[])
+
+    def ipython(self):
+        """Start any version of IPython"""
+        # Try newest entry point first; each helper raises ImportError if its
+        # IPython API generation is absent.
+        for ip in (self._ipython, self._ipython_pre_100, self._ipython_pre_011):
+            try:
+                ip()
+            except ImportError:
+                pass
+            else:
+                return
+        # no IPython, raise ImportError
+        raise ImportError("No IPython")
+
+    def bpython(self):
+        """Start the bpython shell (raises ImportError if not installed)."""
+        import bpython
+        bpython.embed()
+
+    def run_shell(self, shell=None):
+        """Run the named shell, or the first importable one from self.shells.
+
+        Raises ImportError when no candidate shell can be imported.
+        """
+        available_shells = [shell] if shell else self.shells
+
+        for shell in available_shells:
+            try:
+                return getattr(self, shell)()
+            except ImportError:
+                pass
+        raise ImportError
+
+    def handle_noargs(self, **options):
+        """Entry point: pick and run an interactive interpreter."""
+        # XXX: (Temporary) workaround for ticket #1796: force early loading of all
+        # models from installed apps.
+        from django.db.models.loading import get_models
+        get_models()
+
+        use_plain = options.get('plain', False)
+        no_startup = options.get('no_startup', False)
+        interface = options.get('interface', None)
+
+        try:
+            if use_plain:
+                # Don't bother loading IPython, because the user wants plain Python.
+                raise ImportError
+
+            self.run_shell(shell=interface)
+        except ImportError:
+            # Plain-Python fallback path.
+            import code
+            # Set up a dictionary to serve as the environment for the shell, so
+            # that tab completion works on objects that are imported at runtime.
+            # See ticket 5082.
+            imported_objects = {}
+            try:  # Try activating rlcompleter, because it's handy.
+                import readline
+            except ImportError:
+                pass
+            else:
+                # We don't have to wrap the following import in a 'try', because
+                # we already know 'readline' was imported successfully.
+                import rlcompleter
+                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
+                readline.parse_and_bind("tab:complete")
+
+            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
+            # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
+            if not no_startup:
+                for pythonrc in (os.environ.get("PYTHONSTARTUP"), '~/.pythonrc.py'):
+                    if not pythonrc:
+                        continue
+                    pythonrc = os.path.expanduser(pythonrc)
+                    if not os.path.isfile(pythonrc):
+                        continue
+                    try:
+                        with open(pythonrc) as handle:
+                            exec(compile(handle.read(), pythonrc, 'exec'), imported_objects)
+                    except NameError:
+                        pass
+            code.interact(local=imported_objects)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sql.py b/lib/python2.7/site-packages/django/core/management/commands/sql.py
new file mode 100644
index 0000000..52b2058
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sql.py
@@ -0,0 +1,21 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_create
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the CREATE TABLE SQL statements for the given app name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+    )
+
+    # NOTE(review): output_transaction is interpreted by the base command
+    # (presumably wraps the printed SQL in transaction statements) — confirm
+    # in django.core.management.base.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # Called once per app by AppCommand; returns the joined statements.
+        return '\n'.join(sql_create(app, self.style, connections[options.get('database')]))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlall.py b/lib/python2.7/site-packages/django/core/management/commands/sqlall.py
new file mode 100644
index 0000000..0e2c05b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlall.py
@@ -0,0 +1,22 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_all
+from django.db import connections, DEFAULT_DB_ALIAS
+
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # sql_all combines table-creation, custom and index SQL for the app.
+        return '\n'.join(sql_all(app, self.style, connections[options.get('database')]))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlclear.py b/lib/python2.7/site-packages/django/core/management/commands/sqlclear.py
new file mode 100644
index 0000000..ec2602d
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlclear.py
@@ -0,0 +1,21 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_delete
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the DROP TABLE SQL statements for the given app name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # sql_delete produces the DROP statements for the app's tables.
+        return '\n'.join(sql_delete(app, self.style, connections[options.get('database')]))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlcustom.py b/lib/python2.7/site-packages/django/core/management/commands/sqlcustom.py
new file mode 100644
index 0000000..0d46c4e
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlcustom.py
@@ -0,0 +1,21 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_custom
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the custom table modifying SQL statements for the given app name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # sql_custom collects the app's hand-written SQL files.
+        return '\n'.join(sql_custom(app, self.style, connections[options.get('database')]))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqldropindexes.py b/lib/python2.7/site-packages/django/core/management/commands/sqldropindexes.py
new file mode 100644
index 0000000..fce7721
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqldropindexes.py
@@ -0,0 +1,23 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_destroy_indexes
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the DROP INDEX SQL statements for the given model module name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # sql_destroy_indexes emits the DROP INDEX statements for the app.
+        return '\n'.join(sql_destroy_indexes(app, self.style, connections[options.get('database')]))
+
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlflush.py b/lib/python2.7/site-packages/django/core/management/commands/sqlflush.py
new file mode 100644
index 0000000..b98ecfd
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlflush.py
@@ -0,0 +1,21 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import NoArgsCommand
+from django.core.management.sql import sql_flush
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(NoArgsCommand):
+    # Read-only command: prints the flush SQL, never executes it.
+    help = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
+
+    option_list = NoArgsCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_noargs(self, **options):
+        # only_django=True restricts the flush to Django-managed tables.
+        return '\n'.join(sql_flush(self.style, connections[options.get('database')], only_django=True))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlindexes.py b/lib/python2.7/site-packages/django/core/management/commands/sqlindexes.py
new file mode 100644
index 0000000..f95d4f1
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlindexes.py
@@ -0,0 +1,22 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.core.management.sql import sql_indexes
+from django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = "Prints the CREATE INDEX SQL statements for the given model module name(s)."
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        # sql_indexes emits CREATE INDEX statements for the app's models.
+        return '\n'.join(sql_indexes(app, self.style, connections[options.get('database')]))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlinitialdata.py b/lib/python2.7/site-packages/django/core/management/commands/sqlinitialdata.py
new file mode 100644
index 0000000..b9e2249
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlinitialdata.py
@@ -0,0 +1,7 @@
+from django.core.management.base import AppCommand, CommandError
+
+class Command(AppCommand):
+    # Tombstone command kept so the old name gives a helpful error.
+    help = "RENAMED: see 'sqlcustom'"
+
+    def handle(self, *apps, **options):
+        # Always fails: this command only exists to redirect users.
+        raise CommandError("This command has been renamed. Use the 'sqlcustom' command instead.")
diff --git a/lib/python2.7/site-packages/django/core/management/commands/sqlsequencereset.py b/lib/python2.7/site-packages/django/core/management/commands/sqlsequencereset.py
new file mode 100644
index 0000000..7b9e85a
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/sqlsequencereset.py
@@ -0,0 +1,22 @@
+from __future__ import unicode_literals
+
+from optparse import make_option
+
+from django.core.management.base import AppCommand
+from django.db import connections, models, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+    # Read-only command: prints SQL, never executes it.
+    help = 'Prints the SQL statements for resetting sequences for the given app name(s).'
+
+    option_list = AppCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
+                'SQL for. Defaults to the "default" database.'),
+
+    )
+
+    # Interpreted by the base command when rendering the output.
+    output_transaction = True
+
+    def handle_app(self, app, **options):
+        """Delegate to the backend's sequence_reset_sql for all app models."""
+        connection = connections[options.get('database')]
+        return '\n'.join(connection.ops.sequence_reset_sql(self.style, models.get_models(app, include_auto_created=True)))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/startapp.py b/lib/python2.7/site-packages/django/core/management/commands/startapp.py
new file mode 100644
index 0000000..692ad09
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/startapp.py
@@ -0,0 +1,24 @@
+from django.core.management.base import CommandError
+from django.core.management.templates import TemplateCommand
+from django.utils.importlib import import_module
+
+
+class Command(TemplateCommand):
+    help = ("Creates a Django app directory structure for the given app "
+            "name in the current directory or optionally in the given "
+            "directory.")
+
+    def handle(self, app_name=None, target=None, **options):
+        """Validate the app name, then render the 'app' template.
+
+        Raises CommandError if the name is invalid or shadows an existing
+        importable Python module.
+        """
+        self.validate_name(app_name, "app")
+
+        # Check that the app_name cannot be imported.
+        # A successful import means the name would shadow an existing module.
+        try:
+            import_module(app_name)
+        except ImportError:
+            pass
+        else:
+            raise CommandError("%r conflicts with the name of an existing "
+                               "Python module and cannot be used as an app "
+                               "name. Please try another name." % app_name)
+
+        super(Command, self).handle('app', app_name, target, **options)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/startproject.py b/lib/python2.7/site-packages/django/core/management/commands/startproject.py
new file mode 100644
index 0000000..b143e6c
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/startproject.py
@@ -0,0 +1,30 @@
+from django.core.management.base import CommandError
+from django.core.management.templates import TemplateCommand
+from django.utils.crypto import get_random_string
+from django.utils.importlib import import_module
+
+
+class Command(TemplateCommand):
+    help = ("Creates a Django project directory structure for the given "
+            "project name in the current directory or optionally in the "
+            "given directory.")
+
+    def handle(self, project_name=None, target=None, *args, **options):
+        """Validate the project name, generate a SECRET_KEY, render template.
+
+        Raises CommandError if the name is invalid or shadows an existing
+        importable Python module.
+        """
+        self.validate_name(project_name, "project")
+
+        # Check that the project_name cannot be imported.
+        # A successful import means the name would shadow an existing module.
+        try:
+            import_module(project_name)
+        except ImportError:
+            pass
+        else:
+            raise CommandError("%r conflicts with the name of an existing "
+                               "Python module and cannot be used as a "
+                               "project name. Please try another name." %
+                               project_name)
+
+        # Create a random SECRET_KEY hash to put it in the main settings.
+        # get_random_string uses a cryptographic source; 50 chars from this
+        # alphabet matches Django's default settings template.
+        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
+        options['secret_key'] = get_random_string(50, chars)
+
+        super(Command, self).handle('project', project_name, target, **options)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/syncdb.py b/lib/python2.7/site-packages/django/core/management/commands/syncdb.py
new file mode 100644
index 0000000..3e73d24
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/syncdb.py
@@ -0,0 +1,162 @@
+from optparse import make_option
+import itertools
+import traceback
+
+from django.conf import settings
+from django.core.management import call_command
+from django.core.management.base import NoArgsCommand
+from django.core.management.color import no_style
+from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal
+from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
+from django.utils.datastructures import SortedDict
+from django.utils.importlib import import_module
+
+
+class Command(NoArgsCommand):
+    # Creates missing tables for all INSTALLED_APPS models, installs custom
+    # SQL and indexes for newly created models, then loads initial_data
+    # fixtures. Existing tables are never altered.
+    option_list = NoArgsCommand.option_list + (
+        make_option('--noinput', action='store_false', dest='interactive', default=True,
+            help='Tells Django to NOT prompt the user for input of any kind.'),
+        make_option('--no-initial-data', action='store_false', dest='load_initial_data', default=True,
+            help='Tells Django not to load any initial data after database synchronization.'),
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
+                'Defaults to the "default" database.'),
+    )
+    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
+
+    def handle_noargs(self, **options):
+        """Synchronize the chosen database with the installed models."""
+        verbosity = int(options.get('verbosity'))
+        interactive = options.get('interactive')
+        show_traceback = options.get('traceback')
+        load_initial_data = options.get('load_initial_data')
+
+        self.style = no_style()
+
+        # Import the 'management' module within each installed app, to register
+        # dispatcher events.
+        for app_name in settings.INSTALLED_APPS:
+            try:
+                import_module('.management', app_name)
+            except ImportError as exc:
+                # This is slightly hackish. We want to ignore ImportErrors
+                # if the "management" module itself is missing -- but we don't
+                # want to ignore the exception if the management module exists
+                # but raises an ImportError for some reason. The only way we
+                # can do this is to check the text of the exception. Note that
+                # we're a bit broad in how we check the text, because different
+                # Python implementations may not use the same text.
+                # CPython uses the text "No module named management"
+                # PyPy uses "No module named myproject.myapp.management"
+                msg = exc.args[0]
+                if not msg.startswith('No module named') or 'management' not in msg:
+                    raise
+
+        db = options.get('database')
+        connection = connections[db]
+        cursor = connection.cursor()
+
+        # Get a list of already installed *models* so that references work right.
+        tables = connection.introspection.table_names()
+        seen_models = connection.introspection.installed_models(tables)
+        created_models = set()
+        pending_references = {}
+
+        # Build the manifest of apps and models that are to be synchronized
+        # (only models the router allows to sync to this database).
+        all_models = [
+            (app.__name__.split('.')[-2],
+                [m for m in models.get_models(app, include_auto_created=True)
+                if router.allow_syncdb(db, m)])
+            for app in models.get_apps()
+        ]
+
+        def model_installed(model):
+            # True when neither the model's table nor its auto-created
+            # (m2m through) table already exists in the database.
+            opts = model._meta
+            converter = connection.introspection.table_name_converter
+            return not ((converter(opts.db_table) in tables) or
+                (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))
+
+        manifest = SortedDict(
+            (app_name, list(filter(model_installed, model_list)))
+            for app_name, model_list in all_models
+        )
+
+        create_models = set([x for x in itertools.chain(*manifest.values())])
+        emit_pre_sync_signal(create_models, verbosity, interactive, db)
+
+        # Create the tables for each model
+        if verbosity >= 1:
+            self.stdout.write("Creating tables ...\n")
+        with transaction.commit_on_success_unless_managed(using=db):
+            for app_name, model_list in manifest.items():
+                for model in model_list:
+                    # Create the model's database table, if it doesn't already exist.
+                    if verbosity >= 3:
+                        self.stdout.write("Processing %s.%s model\n" % (app_name, model._meta.object_name))
+                    sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
+                    seen_models.add(model)
+                    created_models.add(model)
+                    # Foreign keys to not-yet-created tables are deferred in
+                    # pending_references and flushed once the target exists.
+                    for refto, refs in references.items():
+                        pending_references.setdefault(refto, []).extend(refs)
+                        if refto in seen_models:
+                            sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references))
+                    sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
+                    if verbosity >= 1 and sql:
+                        self.stdout.write("Creating table %s\n" % model._meta.db_table)
+                    for statement in sql:
+                        cursor.execute(statement)
+                    tables.append(connection.introspection.table_name_converter(model._meta.db_table))
+
+        # Send the post_syncdb signal, so individual apps can do whatever they need
+        # to do at this point.
+        emit_post_sync_signal(created_models, verbosity, interactive, db)
+
+        # The connection may have been closed by a syncdb handler.
+        cursor = connection.cursor()
+
+        # Install custom SQL for the app (but only if this
+        # is a model we've just created)
+        if verbosity >= 1:
+            self.stdout.write("Installing custom SQL ...\n")
+        for app_name, model_list in manifest.items():
+            for model in model_list:
+                if model in created_models:
+                    custom_sql = custom_sql_for_model(model, self.style, connection)
+                    if custom_sql:
+                        if verbosity >= 2:
+                            self.stdout.write("Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
+                        try:
+                            with transaction.commit_on_success_unless_managed(using=db):
+                                for sql in custom_sql:
+                                    cursor.execute(sql)
+                        except Exception as e:
+                            # Best-effort: report the failure but keep syncing
+                            # the remaining models.
+                            self.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \
+                                    (app_name, model._meta.object_name, e))
+                            if show_traceback:
+                                traceback.print_exc()
+                    else:
+                        if verbosity >= 3:
+                            self.stdout.write("No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
+
+        if verbosity >= 1:
+            self.stdout.write("Installing indexes ...\n")
+        # Install SQL indices for all newly created models
+        for app_name, model_list in manifest.items():
+            for model in model_list:
+                if model in created_models:
+                    index_sql = connection.creation.sql_indexes_for_model(model, self.style)
+                    if index_sql:
+                        if verbosity >= 2:
+                            self.stdout.write("Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
+                        try:
+                            with transaction.commit_on_success_unless_managed(using=db):
+                                for sql in index_sql:
+                                    cursor.execute(sql)
+                        except Exception as e:
+                            # Best-effort: report the failure but keep going.
+                            self.stderr.write("Failed to install index for %s.%s model: %s\n" % \
+                                    (app_name, model._meta.object_name, e))
+
+        # Load initial_data fixtures (unless that has been disabled)
+        if load_initial_data:
+            call_command('loaddata', 'initial_data', verbosity=verbosity,
+                         database=db, skip_validation=True)
diff --git a/lib/python2.7/site-packages/django/core/management/commands/test.py b/lib/python2.7/site-packages/django/core/management/commands/test.py
new file mode 100644
index 0000000..5232c37
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/test.py
@@ -0,0 +1,91 @@
+import logging
+import sys
+import os
+from optparse import make_option, OptionParser
+
+from django.conf import settings
+from django.core.management.base import BaseCommand
+from django.test.utils import get_runner
+
+
+class Command(BaseCommand):
+    # Test-runner command. The actual runner class is pluggable via the
+    # TEST_RUNNER setting or the --testrunner option.
+    option_list = BaseCommand.option_list + (
+        make_option('--noinput',
+            action='store_false', dest='interactive', default=True,
+            help='Tells Django to NOT prompt the user for input of any kind.'),
+        make_option('--failfast',
+            action='store_true', dest='failfast', default=False,
+            help='Tells Django to stop running the test suite after first '
+                 'failed test.'),
+        make_option('--testrunner',
+            action='store', dest='testrunner',
+            help='Tells Django to use specified test runner class instead of '
+                 'the one specified by the TEST_RUNNER setting.'),
+        make_option('--liveserver',
+            action='store', dest='liveserver', default=None,
+            help='Overrides the default address where the live server (used '
+                 'with LiveServerTestCase) is expected to run from. The '
+                 'default value is localhost:8081.'),
+    )
+    help = ('Discover and run tests in the specified modules or the current directory.')
+    args = '[path.to.modulename|path.to.modulename.TestCase|path.to.modulename.TestCase.test_method]...'
+
+    requires_model_validation = False
+
+    def __init__(self):
+        # Populated by run_from_argv() before option parsing so that
+        # create_parser() can ask the runner class for extra options.
+        self.test_runner = None
+        super(Command, self).__init__()
+
+    def run_from_argv(self, argv):
+        """
+        Pre-parse the command line to extract the value of the --testrunner
+        option. This allows a test runner to define additional command line
+        arguments.
+        """
+        option = '--testrunner='
+        for arg in argv[2:]:
+            if arg.startswith(option):
+                self.test_runner = arg[len(option):]
+                break
+        super(Command, self).run_from_argv(argv)
+
+    def create_parser(self, prog_name, subcommand):
+        """Build an OptionParser that includes the runner's own options."""
+        test_runner_class = get_runner(settings, self.test_runner)
+        options = self.option_list + getattr(
+            test_runner_class, 'option_list', ())
+        return OptionParser(prog=prog_name,
+                            usage=self.usage(subcommand),
+                            version=self.get_version(),
+                            option_list=options)
+
+    def execute(self, *args, **options):
+        """Run the command with deprecation warnings surfaced on stderr."""
+        if int(options['verbosity']) > 0:
+            # ensure that deprecation warnings are displayed during testing
+            # the following state is assumed:
+            #  logging.capturewarnings is true
+            #  a "default" level warnings filter has been added for
+            #  DeprecationWarning. See django.conf.LazySettings._configure_logging
+            logger = logging.getLogger('py.warnings')
+            handler = logging.StreamHandler()
+            logger.addHandler(handler)
+        super(Command, self).execute(*args, **options)
+        if int(options['verbosity']) > 0:
+            # remove the testing-specific handler
+            logger.removeHandler(handler)
+
+    def handle(self, *test_labels, **options):
+        """Instantiate the configured runner and exit non-zero on failures."""
+        from django.conf import settings
+        from django.test.utils import get_runner
+
+        TestRunner = get_runner(settings, options.get('testrunner'))
+        options['verbosity'] = int(options.get('verbosity'))
+
+        if options.get('liveserver') is not None:
+            os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options['liveserver']
+            del options['liveserver']
+
+        test_runner = TestRunner(**options)
+        failures = test_runner.run_tests(test_labels)
+
+        if failures:
+            sys.exit(bool(failures))
diff --git a/lib/python2.7/site-packages/django/core/management/commands/testserver.py b/lib/python2.7/site-packages/django/core/management/commands/testserver.py
new file mode 100644
index 0000000..97fc8ea
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/testserver.py
@@ -0,0 +1,45 @@
+from django.core.management.base import BaseCommand
+
+from optparse import make_option
+
+class Command(BaseCommand):
+    # Spins up a development server backed by a freshly created *test*
+    # database populated from the given fixtures.
+    option_list = BaseCommand.option_list + (
+        make_option('--noinput', action='store_false', dest='interactive', default=True,
+            help='Tells Django to NOT prompt the user for input of any kind.'),
+        make_option('--addrport', action='store', dest='addrport',
+            type='string', default='',
+            help='port number or ipaddr:port to run the server on'),
+        make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
+            help='Tells Django to use a IPv6 address.'),
+    )
+    help = 'Runs a development server with data from the given fixture(s).'
+    args = '[fixture ...]'
+
+    requires_model_validation = False
+
+    def handle(self, *fixture_labels, **options):
+        """Create a test DB, load fixtures, then run the dev server on it."""
+        from django.core.management import call_command
+        from django.db import connection
+
+        verbosity = int(options.get('verbosity'))
+        interactive = options.get('interactive')
+        addrport = options.get('addrport')
+
+        # Create a test database.
+        db_name = connection.creation.create_test_db(verbosity=verbosity, autoclobber=not interactive)
+
+        # Import the fixture data into the test database.
+        call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})
+
+        # Run the development server. Turn off auto-reloading because it causes
+        # a strange error -- it causes this handle() method to be called
+        # multiple times.
+        shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
+        use_threading = connection.features.test_db_allows_multiple_connections
+        call_command('runserver',
+            addrport=addrport,
+            shutdown_message=shutdown_message,
+            use_reloader=False,
+            use_ipv6=options['use_ipv6'],
+            use_threading=use_threading
+        )
diff --git a/lib/python2.7/site-packages/django/core/management/commands/validate.py b/lib/python2.7/site-packages/django/core/management/commands/validate.py
new file mode 100644
index 0000000..0dec3ea
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/commands/validate.py
@@ -0,0 +1,10 @@
+from django.core.management.base import NoArgsCommand
+
+
+class Command(NoArgsCommand):
+    help = "Validates all installed models."
+
+    # Skip the automatic pre-run validation; this command *is* the validation.
+    requires_model_validation = False
+
+    def handle_noargs(self, **options):
+        # self.validate() is provided by the base command class.
+        self.validate(display_num_errors=True)
diff --git a/lib/python2.7/site-packages/django/core/management/sql.py b/lib/python2.7/site-packages/django/core/management/sql.py
new file mode 100644
index 0000000..b58d89f
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/sql.py
@@ -0,0 +1,216 @@
+from __future__ import unicode_literals
+
+import codecs
+import os
+import re
+
+from django.conf import settings
+from django.core.management.base import CommandError
+from django.db import models
+from django.db.models import get_models
+from django.utils._os import upath
+
+
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."

    if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't properly specified the ENGINE setting for the database.\n" +
            "see: https://docs.djangoproject.com/en/dev/ref/settings/#databases")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    # Maps referenced model -> list of pending foreign-key references to it;
    # entries are flushed into ALTER statements once the target table exists.
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            # Emit these as SQL comments: the referenced tables don't exist,
            # so the statements could not actually be executed.
            alter_sql.extend(['-- ' + sql for sql in
                connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
+
+
def sql_delete(app, style, connection):
    """
    Return a list of the DROP TABLE SQL statements for the given app.

    Only tables that actually exist in the database are dropped, and the
    resulting list is reversed so dependent tables are dropped before the
    tables they reference.
    """
    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except Exception:
        # Deliberately best-effort: with no usable connection we fall back
        # to an empty table list below. (Narrowed from a bare ``except:``,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        cursor = None

    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.table_names(cursor)
    else:
        table_names = []

    output = []

    # Output DROP TABLE statements for standard application tables.
    to_delete = set()

    # Maps referenced model -> list of (model, field) pairs pointing at it,
    # so the backend can break FK dependencies while dropping.
    references_to_delete = {}
    app_models = models.get_models(app, include_auto_created=True)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append((model, f))

            to_delete.add(model)

    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))

    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()

    return output[::-1]  # Reverse it, to deal with table dependencies.
+
+
def sql_flush(style, connection, only_django=False, reset_sequences=True, allow_cascade=False):
    """
    Return the list of SQL statements needed to flush the database.

    When ``only_django`` is True, restrict the flush to tables that have
    associated Django models and are in INSTALLED_APPS; otherwise every
    table in the database is considered. Sequences are reset unless
    ``reset_sequences`` is False.
    """
    if only_django:
        candidate_tables = connection.introspection.django_table_names(only_existing=True)
    else:
        candidate_tables = connection.introspection.table_names()
    if reset_sequences:
        sequences = connection.introspection.sequence_list()
    else:
        sequences = ()
    return connection.ops.sql_flush(style, candidate_tables, sequences, allow_cascade)
+
+
def sql_custom(app, style, connection):
    """Return the custom table-modifying SQL statements for every model in the app."""
    return [statement
            for model in get_models(app)
            for statement in custom_sql_for_model(model, style, connection)]
+
+
def sql_indexes(app, style, connection):
    """Return the CREATE INDEX SQL statements for all models in the given app."""
    statements = []
    for model in models.get_models(app, include_auto_created=True):
        statements += connection.creation.sql_indexes_for_model(model, style)
    return statements
+
+
def sql_destroy_indexes(app, style, connection):
    """Return the DROP INDEX SQL statements for all models in the given app."""
    return [statement
            for model in models.get_models(app, include_auto_created=True)
            for statement in connection.creation.sql_destroy_indexes_for_model(model, style)]
+
+
def sql_all(app, style, connection):
    """Return CREATE TABLE, custom SQL, and CREATE INDEX statements for the app."""
    statements = list(sql_create(app, style, connection))
    statements.extend(sql_custom(app, style, connection))
    statements.extend(sql_indexes(app, style, connection))
    return statements
+
+
+def _split_statements(content):
+ comment_re = re.compile(r"^((?:'[^']*'|[^'])*?)--.*$")
+ statements = []
+ statement = []
+ for line in content.split("\n"):
+ cleaned_line = comment_re.sub(r"\1", line).strip()
+ if not cleaned_line:
+ continue
+ statement.append(cleaned_line)
+ if cleaned_line.endswith(";"):
+ statements.append(" ".join(statement))
+ statement = []
+ return statements
+
+
def custom_sql_for_model(model, style, connection):
    """
    Return the list of custom SQL statements for ``model``: any field-level
    ``post_create_sql`` output plus the contents of the app's
    ``sql/<model>.<backend>.sql`` and ``sql/<model>.sql`` files, split into
    individual statements.
    """
    opts = model._meta
    # Directory holding the app's custom SQL files: <app package dir>/sql
    app_dir = os.path.normpath(os.path.join(os.path.dirname(upath(models.get_app(model._meta.app_label).__file__)), 'sql'))
    output = []

    # Post-creation SQL should come before any initial SQL data is loaded.
    # However, this should not be done for models that are unmanaged or
    # for fields that are part of a parent model (via model inheritance).
    if opts.managed:
        post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
        for f in post_sql_fields:
            output.extend(f.post_create_sql(style, model._meta.db_table))

    # Find custom SQL, if it's available.
    backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
    # Backend-specific file is listed first, so its statements are emitted
    # before the generic per-model file's statements.
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.model_name, backend_name)),
                 os.path.join(app_dir, "%s.sql" % opts.model_name)]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            # 'U' opens with universal newlines so \r\n files split cleanly.
            with codecs.open(sql_file, 'U', encoding=settings.FILE_CHARSET) as fp:
                # Some backends can't execute more than one SQL statement at a time,
                # so split into separate statements.
                output.extend(_split_statements(fp.read()))
    return output
+
+
def emit_pre_sync_signal(create_models, verbosity, interactive, db):
    """Send the ``pre_syncdb`` signal once for every installed application."""
    for app in models.get_apps():
        if verbosity >= 2:
            print("Running pre-sync handlers for application %s"
                  % app.__name__.split('.')[-2])
        models.signals.pre_syncdb.send(
            sender=app, app=app, create_models=create_models,
            verbosity=verbosity, interactive=interactive, db=db)
+
+
def emit_post_sync_signal(created_models, verbosity, interactive, db):
    """Send the ``post_syncdb`` signal once for every installed application."""
    for app in models.get_apps():
        if verbosity >= 2:
            print("Running post-sync handlers for application %s"
                  % app.__name__.split('.')[-2])
        models.signals.post_syncdb.send(
            sender=app, app=app, created_models=created_models,
            verbosity=verbosity, interactive=interactive, db=db)
diff --git a/lib/python2.7/site-packages/django/core/management/templates.py b/lib/python2.7/site-packages/django/core/management/templates.py
new file mode 100644
index 0000000..164fd07
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/templates.py
@@ -0,0 +1,325 @@
+import cgi
+import errno
+import mimetypes
+import os
+import posixpath
+import re
+import shutil
+import stat
+import sys
+import tempfile
+
+from optparse import make_option
+from os import path
+
+import django
+from django.template import Template, Context
+from django.utils import archive
+from django.utils.six.moves.urllib.request import urlretrieve
+from django.utils._os import rmtree_errorhandler
+from django.core.management.base import BaseCommand, CommandError
+from django.core.management.utils import handle_extensions
+
+
+_drive_re = re.compile('^([a-z]):', re.I)
+_url_drive_re = re.compile('^([a-z])[:|]', re.I)
+
+
class TemplateCommand(BaseCommand):
    """
    Copies either a Django application layout template or a Django project
    layout template into the specified directory.

    :param style: A color style object (see django.core.management.color).
    :param app_or_project: The string 'app' or 'project'.
    :param name: The name of the application or project.
    :param directory: The directory to which the template should be copied.
    :param options: The additional variables passed to project or app templates
    """
    args = "[name] [optional destination directory]"
    option_list = BaseCommand.option_list + (
        make_option('--template',
                    action='store', dest='template',
                    help='The path or URL to load the template from.'),
        make_option('--extension', '-e', dest='extensions',
                    action='append', default=['py'],
                    help='The file extension(s) to render (default: "py"). '
                         'Separate multiple extensions with commas, or use '
                         '-e multiple times.'),
        make_option('--name', '-n', dest='files',
                    action='append', default=[],
                    help='The file name(s) to render. '
                         'Separate multiple extensions with commas, or use '
                         '-n multiple times.')
    )
    # No settings module may exist yet (e.g. when starting a new project),
    # so model validation can't be run.
    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = False
    # The supported URL schemes
    url_schemes = ['http', 'https', 'ftp']
    # Can't perform any active locale changes during this command, because
    # setting might not be available at all.
    leave_locale_alone = True

    def handle(self, app_or_project, name, target=None, **options):
        """
        Copy the app/project template tree into ``target`` (or ``./<name>``),
        rendering files whose extension or filename was selected through the
        Django template engine, and clean up any temporary downloads.
        """
        self.app_or_project = app_or_project
        self.paths_to_remove = []
        self.verbosity = int(options.get('verbosity'))

        self.validate_name(name, app_or_project)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    message = "'%s' already exists" % top_dir
                else:
                    message = e
                raise CommandError(message)
        else:
            top_dir = os.path.abspath(path.expanduser(target))
            if not os.path.exists(top_dir):
                raise CommandError("Destination directory '%s' does not "
                                   "exist, please create it first." % top_dir)

        # Extensions to render; ignored=() so even .py can be excluded by
        # the user via the option.
        extensions = tuple(
            handle_extensions(options.get('extensions'), ignored=()))
        extra_files = []
        for file in options.get('files'):
            extra_files.extend(map(lambda x: x.strip(), file.split(',')))
        if self.verbosity >= 2:
            self.stdout.write("Rendering %s template files with "
                              "extensions: %s\n" %
                              (app_or_project, ', '.join(extensions)))
            self.stdout.write("Rendering %s template files with "
                              "filenames: %s\n" %
                              (app_or_project, ', '.join(extra_files)))

        # Template-context keys are parameterized on 'app' or 'project'.
        base_name = '%s_name' % app_or_project
        base_subdir = '%s_template' % app_or_project
        base_directory = '%s_directory' % app_or_project
        if django.VERSION[-2] != 'final':
            docs_version = 'dev'
        else:
            docs_version = '%d.%d' % django.VERSION[:2]

        context = Context(dict(options, **{
            base_name: name,
            base_directory: top_dir,
            'docs_version': docs_version,
        }), autoescape=False)

        # Setup a stub settings environment for template rendering
        from django.conf import settings
        if not settings.configured:
            settings.configure()

        template_dir = self.handle_template(options.get('template'),
                                            base_subdir)
        prefix_length = len(template_dir) + 1

        for root, dirs, files in os.walk(template_dir):

            path_rest = root[prefix_length:]
            # Occurrences of the placeholder (e.g. 'app_name') in directory
            # names are replaced with the real name.
            relative_dir = path_rest.replace(base_name, name)
            if relative_dir:
                target_dir = path.join(top_dir, relative_dir)
                if not path.exists(target_dir):
                    os.mkdir(target_dir)

            # Prune hidden and bytecode-cache directories in place so
            # os.walk doesn't descend into them.
            for dirname in dirs[:]:
                if dirname.startswith('.') or dirname == '__pycache__':
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith(('.pyo', '.pyc', '.py.class')):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = path.join(root, filename)
                new_path = path.join(top_dir, relative_dir,
                                     filename.replace(base_name, name))
                if path.exists(new_path):
                    raise CommandError("%s already exists, overlaying a "
                                       "project or app into an existing "
                                       "directory won't replace conflicting "
                                       "files" % new_path)

                # Only render the Python files, as we don't want to
                # accidentally render Django templates files
                with open(old_path, 'rb') as template_file:
                    content = template_file.read()
                if filename.endswith(extensions) or filename in extra_files:
                    content = content.decode('utf-8')
                    template = Template(content)
                    content = template.render(context)
                    content = content.encode('utf-8')
                with open(new_path, 'wb') as new_file:
                    new_file.write(content)

                if self.verbosity >= 2:
                    self.stdout.write("Creating %s\n" % new_path)
                try:
                    shutil.copymode(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path, self.style.NOTICE)

        # Remove anything download()/extract() left behind.
        if self.paths_to_remove:
            if self.verbosity >= 2:
                self.stdout.write("Cleaning up temporary files.\n")
            for path_to_remove in self.paths_to_remove:
                if path.isfile(path_to_remove):
                    os.remove(path_to_remove)
                else:
                    shutil.rmtree(path_to_remove,
                                  onerror=rmtree_errorhandler)

    def handle_template(self, template, subdir):
        """
        Determines where the app or project templates are.
        Use django.__path__[0] as the default because we don't
        know into which directory Django has been installed.
        """
        if template is None:
            return path.join(django.__path__[0], 'conf', subdir)
        else:
            if template.startswith('file://'):
                template = template[7:]
            expanded_template = path.expanduser(template)
            expanded_template = path.normpath(expanded_template)
            if path.isdir(expanded_template):
                return expanded_template
            if self.is_url(template):
                # downloads the file and returns the path
                absolute_path = self.download(template)
            else:
                absolute_path = path.abspath(expanded_template)
            if path.exists(absolute_path):
                return self.extract(absolute_path)

        raise CommandError("couldn't handle %s template %s." %
                           (self.app_or_project, template))

    def validate_name(self, name, app_or_project):
        """Raise CommandError unless ``name`` is a valid identifier-style name."""
        if name is None:
            raise CommandError("you must provide %s %s name" % (
                "an" if app_or_project == "app" else "a", app_or_project))
        # If it's not a valid directory name.
        if not re.search(r'^[_a-zA-Z]\w*$', name):
            # Provide a smart error message, depending on the error.
            if not re.search(r'^[_a-zA-Z]', name):
                message = 'make sure the name begins with a letter or underscore'
            else:
                message = 'use only numbers, letters and underscores'
            raise CommandError("%r is not a valid %s name. Please %s." %
                               (name, app_or_project, message))

    def download(self, url):
        """
        Downloads the given URL and returns the file name.
        """
        def cleanup_url(url):
            # Returns (final path segment, URL as shown to the user).
            tmp = url.rstrip('/')
            filename = tmp.split('/')[-1]
            if url.endswith('/'):
                display_url = tmp + '/'
            else:
                display_url = url
            return filename, display_url

        prefix = 'django_%s_template_' % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
        # Registered for deletion by handle() once rendering is done.
        self.paths_to_remove.append(tempdir)
        filename, display_url = cleanup_url(url)

        if self.verbosity >= 2:
            self.stdout.write("Downloading %s\n" % display_url)
        try:
            the_path, info = urlretrieve(url, path.join(tempdir, filename))
        except IOError as e:
            raise CommandError("couldn't download URL %s to %s: %s" %
                               (url, filename, e))

        used_name = the_path.split('/')[-1]

        # Trying to get better name from response headers
        content_disposition = info.get('content-disposition')
        if content_disposition:
            _, params = cgi.parse_header(content_disposition)
            guessed_filename = params.get('filename') or used_name
        else:
            guessed_filename = used_name

        # Falling back to content type guessing
        ext = self.splitext(guessed_filename)[1]
        content_type = info.get('content-type')
        if not ext and content_type:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                guessed_filename += ext

        # Move the temporary file to a filename that has better
        # chances of being recognized by the archive utils
        if used_name != guessed_filename:
            guessed_path = path.join(tempdir, guessed_filename)
            shutil.move(the_path, guessed_path)
            return guessed_path

        # Giving up
        return the_path

    def splitext(self, the_path):
        """
        Like os.path.splitext, but takes off .tar, too
        """
        base, ext = posixpath.splitext(the_path)
        if base.lower().endswith('.tar'):
            # Fold '.tar' into the extension so 'x.tar.gz' -> ('x', '.tar.gz').
            ext = base[-4:] + ext
            base = base[:-4]
        return base, ext

    def extract(self, filename):
        """
        Extracts the given file to a temporary directory and returns
        the path of the directory with the extracted content.
        """
        prefix = 'django_%s_template_' % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
        # Registered for deletion by handle() once rendering is done.
        self.paths_to_remove.append(tempdir)
        if self.verbosity >= 2:
            self.stdout.write("Extracting %s\n" % filename)
        try:
            archive.extract(filename, tempdir)
            return tempdir
        except (archive.ArchiveException, IOError) as e:
            raise CommandError("couldn't extract file %s to %s: %s" %
                               (filename, tempdir, e))

    def is_url(self, template):
        """
        Returns True if the name looks like a URL
        """
        if ':' not in template:
            return False
        scheme = template.split(':', 1)[0].lower()
        return scheme in self.url_schemes

    def make_writeable(self, filename):
        """
        Make sure that the file is writeable.
        Useful if our source is read-only.
        """
        if sys.platform.startswith('java'):
            # On Jython there is no os.access()
            return
        if not os.access(filename, os.W_OK):
            st = os.stat(filename)
            new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
            os.chmod(filename, new_permissions)
diff --git a/lib/python2.7/site-packages/django/core/management/utils.py b/lib/python2.7/site-packages/django/core/management/utils.py
new file mode 100644
index 0000000..a8959e8
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/utils.py
@@ -0,0 +1,79 @@
+from __future__ import absolute_import
+
+import os
+from subprocess import PIPE, Popen
+import sys
+
+from django.utils.encoding import force_text, DEFAULT_LOCALE_ENCODING
+from django.utils import six
+
+from .base import CommandError
+
+
def popen_wrapper(args, os_err_exc_type=CommandError):
    """
    Friendly wrapper around Popen.

    Returns stdout output, stderr output and OS status code. An OSError
    while spawning is re-raised as ``os_err_exc_type`` with the original
    traceback preserved.
    """
    try:
        process = Popen(args, shell=False, stdout=PIPE, stderr=PIPE,
                        close_fds=os.name != 'nt', universal_newlines=True)
    except OSError as e:
        message = 'Error executing %s: %s' % (args[0], e.strerror)
        six.reraise(os_err_exc_type, os_err_exc_type(message), sys.exc_info()[2])
    out, err = process.communicate()
    decoded_err = force_text(err, DEFAULT_LOCALE_ENCODING, strings_only=True)
    return (out, decoded_err, process.returncode)
+
+
def handle_extensions(extensions=('html',), ignored=('py',)):
    """
    Organizes multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times. Note that the .py extension is ignored
    here because of the way non-*.py files are handled in make_messages() (they
    are copied to file.ext.py files to trick xgettext to parse them as Python
    files).

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> sorted(handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py']))
    ['.html', '.js']
    >>> sorted(handle_extensions(['.html, txt,.tpl']))
    ['.html', '.tpl', '.txt']
    """
    ext_list = []
    for ext in extensions:
        ext_list.extend(ext.replace(' ', '').split(','))
    # Normalize every entry to a leading dot, then drop the ignored ones.
    # (Replaces the original enumerate-and-mutate loop and set([listcomp]).)
    with_dots = [ext if ext.startswith('.') else '.%s' % ext for ext in ext_list]
    return set(ext for ext in with_dots if ext.strip('.') not in ignored)
+
def find_command(cmd, path=None, pathext=None):
    """
    Locate ``cmd`` on ``path`` and return its full path, or None.

    ``pathext`` lists Windows-style executable suffixes to try; it defaults
    to the PATHEXT environment variable. If ``cmd`` already ends with one of
    the suffixes, no suffix is appended during the search.
    """
    if path is None:
        path = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(path, six.string_types):
        path = [path]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
    # don't use extensions if the command ends with one of them
    if any(cmd.endswith(ext) for ext in pathext):
        pathext = ['']
    # check if we find the command on PATH
    for directory in path:
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate):
            return candidate
        for ext in pathext:
            extended = candidate + ext
            if os.path.isfile(extended):
                return extended
    return None
diff --git a/lib/python2.7/site-packages/django/core/management/validation.py b/lib/python2.7/site-packages/django/core/management/validation.py
new file mode 100644
index 0000000..a64c6e8
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/management/validation.py
@@ -0,0 +1,372 @@
+import collections
+import sys
+
+from django.conf import settings
+from django.core.management.color import color_style
+from django.utils.encoding import force_str
+from django.utils.itercompat import is_iterable
+from django.utils import six
+
+
class ModelErrorCollection:
    # Accumulates model-validation errors and echoes each one, colorized,
    # to the given output stream as it is added.
    def __init__(self, outfile=sys.stdout):
        self.errors = []        # list of (context, error) pairs collected so far
        self.outfile = outfile  # stream errors are written to
        self.style = color_style()

    def add(self, context, error):
        # Record the error and immediately write "<context>: <error>" in
        # the terminal's ERROR color.
        self.errors.append((context, error))
        self.outfile.write(self.style.ERROR(force_str("%s: %s\n" % (context, error))))
+
+
+def get_validation_errors(outfile, app=None):
+ """
+ Validates all models that are part of the specified app. If no app name is provided,
+ validates all models of all installed apps. Writes errors, if any, to outfile.
+ Returns number of errors.
+ """
+ from django.db import models, connection
+ from django.db.models.loading import get_app_errors
+ from django.db.models.deletion import SET_NULL, SET_DEFAULT
+
+ e = ModelErrorCollection(outfile)
+
+ for (app_name, error) in get_app_errors().items():
+ e.add(app_name, error)
+
+ for cls in models.get_models(app, include_swapped=True):
+ opts = cls._meta
+
+ # Check swappable attribute.
+ if opts.swapped:
+ try:
+ app_label, model_name = opts.swapped.split('.')
+ except ValueError:
+ e.add(opts, "%s is not of the form 'app_label.app_name'." % opts.swappable)
+ continue
+ if not models.get_model(app_label, model_name):
+ e.add(opts, "Model has been swapped out for '%s' which has not been installed or is abstract." % opts.swapped)
+ # No need to perform any other validation checks on a swapped model.
+ continue
+
+ # If this is the current User model, check known validation problems with User models
+ if settings.AUTH_USER_MODEL == '%s.%s' % (opts.app_label, opts.object_name):
+ # Check that REQUIRED_FIELDS is a list
+ if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
+ e.add(opts, 'The REQUIRED_FIELDS must be a list or tuple.')
+
+ # Check that the USERNAME FIELD isn't included in REQUIRED_FIELDS.
+ if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
+ e.add(opts, 'The field named as the USERNAME_FIELD should not be included in REQUIRED_FIELDS on a swappable User model.')
+
+ # Check that the username field is unique
+ if not opts.get_field(cls.USERNAME_FIELD).unique:
+ e.add(opts, 'The USERNAME_FIELD must be unique. Add unique=True to the field parameters.')
+
+ # Model isn't swapped; do field-specific validation.
+ for f in opts.local_fields:
+ if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
+ e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
+ if f.name.endswith('_'):
+ e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
+ if (f.primary_key and f.null and
+ not connection.features.interprets_empty_strings_as_nulls):
+ # We cannot reliably check this for backends like Oracle which
+ # consider NULL and '' to be equal (and thus set up
+ # character-based fields a little differently).
+ e.add(opts, '"%s": Primary key fields cannot have null=True.' % f.name)
+ if isinstance(f, models.CharField):
+ try:
+ max_length = int(f.max_length)
+ if max_length <= 0:
+ e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
+ except (ValueError, TypeError):
+ e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
+ if isinstance(f, models.DecimalField):
+ decimalp_ok, mdigits_ok = False, False
+ decimalp_msg = '"%s": DecimalFields require a "decimal_places" attribute that is a non-negative integer.'
+ try:
+ decimal_places = int(f.decimal_places)
+ if decimal_places < 0:
+ e.add(opts, decimalp_msg % f.name)
+ else:
+ decimalp_ok = True
+ except (ValueError, TypeError):
+ e.add(opts, decimalp_msg % f.name)
+ mdigits_msg = '"%s": DecimalFields require a "max_digits" attribute that is a positive integer.'
+ try:
+ max_digits = int(f.max_digits)
+ if max_digits <= 0:
+ e.add(opts, mdigits_msg % f.name)
+ else:
+ mdigits_ok = True
+ except (ValueError, TypeError):
+ e.add(opts, mdigits_msg % f.name)
+ invalid_values_msg = '"%s": DecimalFields require a "max_digits" attribute value that is greater than or equal to the value of the "decimal_places" attribute.'
+ if decimalp_ok and mdigits_ok:
+ if decimal_places > max_digits:
+ e.add(opts, invalid_values_msg % f.name)
+ if isinstance(f, models.FileField) and not f.upload_to:
+ e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
+ if isinstance(f, models.ImageField):
+ try:
+ from django.utils.image import Image
+ except ImportError:
+ e.add(opts, '"%s": To use ImageFields, you need to install Pillow. Get it at https://pypi.python.org/pypi/Pillow.' % f.name)
+ if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
+ e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
+ if isinstance(f, models.FilePathField) and not (f.allow_files or f.allow_folders):
+ e.add(opts, '"%s": FilePathFields must have either allow_files or allow_folders set to True.' % f.name)
+ if isinstance(f, models.GenericIPAddressField) and not getattr(f, 'null', False) and getattr(f, 'blank', False):
+ e.add(opts, '"%s": GenericIPAddressField can not accept blank values if null values are not allowed, as blank values are stored as null.' % f.name)
+ if f.choices:
+ if isinstance(f.choices, six.string_types) or not is_iterable(f.choices):
+ e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
+ else:
+ for c in f.choices:
+ if isinstance(c, six.string_types) or not is_iterable(c) or len(c) != 2:
+ e.add(opts, '"%s": "choices" should be a sequence of two-item iterables (e.g. list of 2 item tuples).' % f.name)
+ if f.db_index not in (None, True, False):
+ e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
+
+ # Perform any backend-specific field validation.
+ connection.validation.validate_field(e, opts, f)
+
+ # Check if the on_delete behavior is sane
+ if f.rel and hasattr(f.rel, 'on_delete'):
+ if f.rel.on_delete == SET_NULL and not f.null:
+ e.add(opts, "'%s' specifies on_delete=SET_NULL, but cannot be null." % f.name)
+ elif f.rel.on_delete == SET_DEFAULT and not f.has_default():
+ e.add(opts, "'%s' specifies on_delete=SET_DEFAULT, but has no default value." % f.name)
+
+ # Check to see if the related field will clash with any existing
+ # fields, m2m fields, m2m related objects or related objects
+ if f.rel:
+ if f.rel.to not in models.get_models():
+ # If the related model is swapped, provide a hint;
+ # otherwise, the model just hasn't been installed.
+ if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
+ e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
+ else:
+ e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
+ # it is a string and we could not find the model it refers to
+ # so skip the next section
+ if isinstance(f.rel.to, six.string_types):
+ continue
+
+ # Make sure the related field specified by a ForeignKey is unique
+ if f.requires_unique_target:
+ if len(f.foreign_related_fields) > 1:
+ has_unique_field = False
+ for rel_field in f.foreign_related_fields:
+ has_unique_field = has_unique_field or rel_field.unique
+ if not has_unique_field:
+ e.add(opts, "Field combination '%s' under model '%s' must have a unique=True constraint" % (','.join([rel_field.name for rel_field in f.foreign_related_fields]), f.rel.to.__name__))
+ else:
+ if not f.foreign_related_fields[0].unique:
+ e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.foreign_related_fields[0].name, f.rel.to.__name__))
+
+ rel_opts = f.rel.to._meta
+ rel_name = f.related.get_accessor_name()
+ rel_query_name = f.related_query_name()
+ if not f.rel.is_hidden():
+ for r in rel_opts.fields:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.local_many_to_many:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.get_all_related_many_to_many_objects():
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ for r in rel_opts.get_all_related_objects():
+ if r.field is not f:
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+
+ seen_intermediary_signatures = []
+ for i, f in enumerate(opts.local_many_to_many):
+ # Check to see if the related m2m field will clash with any
+ # existing fields, m2m fields, m2m related objects or related
+ # objects
+ if f.rel.to not in models.get_models():
+ # If the related model is swapped, provide a hint;
+ # otherwise, the model just hasn't been installed.
+ if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
+ e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
+ else:
+ e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
+
+ # it is a string and we could not find the model it refers to
+ # so skip the next section
+ if isinstance(f.rel.to, six.string_types):
+ continue
+
+ # Check that the field is not set to unique. ManyToManyFields do not support unique.
+ if f.unique:
+ e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)
+
+ if f.rel.through is not None and not isinstance(f.rel.through, six.string_types):
+ from_model, to_model = cls, f.rel.to
+ if from_model == to_model and f.rel.symmetrical and not f.rel.through._meta.auto_created:
+ e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
+ seen_from, seen_to, seen_self = False, False, 0
+ for inter_field in f.rel.through._meta.fields:
+ rel_to = getattr(inter_field.rel, 'to', None)
+ if from_model == to_model: # relation to self
+ if rel_to == from_model:
+ seen_self += 1
+ if seen_self > 2:
+ e.add(opts, "Intermediary model %s has more than "
+ "two foreign keys to %s, which is ambiguous "
+ "and is not permitted." % (
+ f.rel.through._meta.object_name,
+ from_model._meta.object_name
+ )
+ )
+ else:
+ if rel_to == from_model:
+ if seen_from:
+ e.add(opts, "Intermediary model %s has more "
+ "than one foreign key to %s, which is "
+ "ambiguous and is not permitted." % (
+ f.rel.through._meta.object_name,
+ from_model._meta.object_name
+ )
+ )
+ else:
+ seen_from = True
+ elif rel_to == to_model:
+ if seen_to:
+ e.add(opts, "Intermediary model %s has more "
+ "than one foreign key to %s, which is "
+ "ambiguous and is not permitted." % (
+ f.rel.through._meta.object_name,
+ rel_to._meta.object_name
+ )
+ )
+ else:
+ seen_to = True
+ if f.rel.through not in models.get_models(include_auto_created=True):
+ e.add(opts, "'%s' specifies an m2m relation through model "
+ "%s, which has not been installed." % (f.name, f.rel.through)
+ )
+ signature = (f.rel.to, cls, f.rel.through)
+ if signature in seen_intermediary_signatures:
+ e.add(opts, "The model %s has two manually-defined m2m "
+ "relations through the model %s, which is not "
+ "permitted. Please consider using an extra field on "
+ "your intermediary model instead." % (
+ cls._meta.object_name,
+ f.rel.through._meta.object_name
+ )
+ )
+ else:
+ seen_intermediary_signatures.append(signature)
+ if not f.rel.through._meta.auto_created:
+ seen_related_fk, seen_this_fk = False, False
+ for field in f.rel.through._meta.fields:
+ if field.rel:
+ if not seen_related_fk and field.rel.to == f.rel.to:
+ seen_related_fk = True
+ elif field.rel.to == cls:
+ seen_this_fk = True
+ if not seen_related_fk or not seen_this_fk:
+ e.add(opts, "'%s' is a manually-defined m2m relation "
+ "through model %s, which does not have foreign keys "
+ "to %s and %s" % (f.name, f.rel.through._meta.object_name,
+ f.rel.to._meta.object_name, cls._meta.object_name)
+ )
+ elif isinstance(f.rel.through, six.string_types):
+ e.add(opts, "'%s' specifies an m2m relation through model %s, "
+ "which has not been installed" % (f.name, f.rel.through)
+ )
+
+ rel_opts = f.rel.to._meta
+ rel_name = f.related.get_accessor_name()
+ rel_query_name = f.related_query_name()
+ # If rel_name is none, there is no reverse accessor (this only
+ # occurs for symmetrical m2m relations to self). If this is the
+ # case, there are no clashes to check for this field, as there are
+ # no reverse descriptors for this field.
+ if rel_name is not None:
+ for r in rel_opts.fields:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.local_many_to_many:
+ if r.name == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ if r.name == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
+ for r in rel_opts.get_all_related_many_to_many_objects():
+ if r.field is not f:
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ for r in rel_opts.get_all_related_objects():
+ if r.get_accessor_name() == rel_name:
+ e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+ if r.get_accessor_name() == rel_query_name:
+ e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
+
+ # Check ordering attribute.
+ if opts.ordering:
+ for field_name in opts.ordering:
+ if field_name == '?':
+ continue
+ if field_name.startswith('-'):
+ field_name = field_name[1:]
+ if opts.order_with_respect_to and field_name == '_order':
+ continue
+ # Skip ordering in the format field1__field2 (FIXME: checking
+ # this format would be nice, but it's a little fiddly).
+ if '__' in field_name:
+ continue
+ # Skip ordering on pk. This is always a valid order_by field
+ # but is an alias and therefore won't be found by opts.get_field.
+ if field_name == 'pk':
+ continue
+ try:
+ opts.get_field(field_name, many_to_many=False)
+ except models.FieldDoesNotExist:
+ e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)
+
+ # Check unique_together.
+ for ut in opts.unique_together:
+ validate_local_fields(e, opts, "unique_together", ut)
+ if not isinstance(opts.index_together, collections.Sequence):
+ e.add(opts, '"index_together" must a sequence')
+ else:
+ for it in opts.index_together:
+ validate_local_fields(e, opts, "index_together", it)
+
+ return len(e.errors)
+
+
def validate_local_fields(e, opts, field_name, fields):
    """
    Check one unique_together/index_together entry.

    ``field_name`` is the option's name ("unique_together" or
    "index_together"), ``fields`` the tuple of field names it lists.
    Problems are collected on ``e`` rather than raised.
    """
    from django.db import models

    # The entry itself must be a sequence of field names.
    if not isinstance(fields, collections.Sequence):
        e.add(opts, 'all %s elements must be sequences' % field_name)
        return

    for name in fields:
        try:
            f = opts.get_field(name, many_to_many=True)
        except models.FieldDoesNotExist:
            e.add(opts, '"%s" refers to %s, a field that doesn\'t exist.' % (field_name, name))
            continue
        # M2M fields have no column on this table, so they cannot take
        # part in a unique or index constraint.
        if isinstance(f.rel, models.ManyToManyRel):
            e.add(opts, '"%s" refers to %s. ManyToManyFields are not supported in %s.' % (field_name, f.name, field_name))
        # Inherited (non-local) fields live on another table.
        if f not in opts.local_fields:
            e.add(opts, '"%s" refers to %s. This is not in the same model as the %s statement.' % (field_name, f.name, field_name))
diff --git a/lib/python2.7/site-packages/django/core/paginator.py b/lib/python2.7/site-packages/django/core/paginator.py
new file mode 100644
index 0000000..c8b9377
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/paginator.py
@@ -0,0 +1,161 @@
+import collections
+from math import ceil
+
+from django.utils import six
+
+
class InvalidPage(Exception):
    """Base class for the page-lookup errors raised by Paginator."""
    pass


class PageNotAnInteger(InvalidPage):
    """Raised when the requested page number cannot be coerced to int."""
    pass


class EmptyPage(InvalidPage):
    """Raised when the requested page number has no results."""
    pass
+
+
class Paginator(object):
    """
    Split ``object_list`` into pages of ``per_page`` items.

    ``orphans`` lets a short trailing page be folded into the previous
    one; ``allow_empty_first_page`` controls whether page 1 is valid for
    an empty list.
    """

    def __init__(self, object_list, per_page, orphans=0,
                 allow_empty_first_page=True):
        self.object_list = object_list
        self.per_page = int(per_page)
        self.orphans = int(orphans)
        self.allow_empty_first_page = allow_empty_first_page
        # Both totals are computed lazily and cached.
        self._num_pages = self._count = None

    def validate_number(self, number):
        """
        Validates the given 1-based page number.
        """
        try:
            number = int(number)
        except (TypeError, ValueError):
            raise PageNotAnInteger('That page number is not an integer')
        if number < 1:
            raise EmptyPage('That page number is less than 1')
        if number > self.num_pages:
            # Page 1 is tolerated for an empty list when configured.
            if not (number == 1 and self.allow_empty_first_page):
                raise EmptyPage('That page contains no results')
        return number

    def page(self, number):
        """
        Returns a Page object for the given 1-based page number.
        """
        number = self.validate_number(number)
        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        # Fold orphans into the final page rather than paginating them.
        if top + self.orphans >= self.count:
            top = self.count
        return self._get_page(self.object_list[bottom:top], number, self)

    def _get_page(self, *args, **kwargs):
        """
        Returns an instance of a single page.

        This hook can be used by subclasses to use an alternative to the
        standard :cls:`Page` object.
        """
        return Page(*args, **kwargs)

    def _get_count(self):
        """
        Returns the total number of objects, across all pages.
        """
        if self._count is None:
            try:
                self._count = self.object_list.count()
            except (AttributeError, TypeError):
                # No count() method (plain sequence), or count() needs an
                # argument (e.g. list.count) -- fall back to len().
                self._count = len(self.object_list)
        return self._count
    count = property(_get_count)

    def _get_num_pages(self):
        """
        Returns the total number of pages.
        """
        if self._num_pages is None:
            if self.count == 0 and not self.allow_empty_first_page:
                self._num_pages = 0
            else:
                # Orphans reduce the item total used for page math, but
                # there is always at least one page.
                hits = max(1, self.count - self.orphans)
                self._num_pages = int(ceil(hits / float(self.per_page)))
        return self._num_pages
    num_pages = property(_get_num_pages)

    def _get_page_range(self):
        """
        Returns a 1-based range of pages for iterating through within
        a template for loop.
        """
        return range(1, self.num_pages + 1)
    page_range = property(_get_page_range)
+
+
+QuerySetPaginator = Paginator # For backwards-compatibility.
+
+
class Page(collections.Sequence):
    """One page of results plus navigation helpers."""

    def __init__(self, object_list, number, paginator):
        self.object_list = object_list
        self.number = number
        self.paginator = paginator

    def __repr__(self):
        return '<Page %s of %s>' % (self.number, self.paginator.num_pages)

    def __len__(self):
        return len(self.object_list)

    def __getitem__(self, index):
        if not isinstance(index, (slice,) + six.integer_types):
            raise TypeError
        # The object_list is converted to a list so that if it was a QuerySet
        # it won't be a database hit per __getitem__.
        if not isinstance(self.object_list, list):
            self.object_list = list(self.object_list)
        return self.object_list[index]

    def has_previous(self):
        """True when a page precedes this one."""
        return self.number > 1

    def has_next(self):
        """True when a page follows this one."""
        return self.number < self.paginator.num_pages

    def has_other_pages(self):
        """True when this is not the only page."""
        return self.has_previous() or self.has_next()

    def next_page_number(self):
        return self.paginator.validate_number(self.number + 1)

    def previous_page_number(self):
        return self.paginator.validate_number(self.number - 1)

    def start_index(self):
        """
        Returns the 1-based index of the first object on this page,
        relative to total objects in the paginator.
        """
        # Special case, return zero if no items.
        if self.paginator.count == 0:
            return 0
        return (self.paginator.per_page * (self.number - 1)) + 1

    def end_index(self):
        """
        Returns the 1-based index of the last object on this page,
        relative to total objects found (hits).
        """
        # Special case for the last page because there can be orphans.
        if self.number == self.paginator.num_pages:
            return self.paginator.count
        return self.number * self.paginator.per_page
diff --git a/lib/python2.7/site-packages/django/core/serializers/__init__.py b/lib/python2.7/site-packages/django/core/serializers/__init__.py
new file mode 100644
index 0000000..005f92b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/__init__.py
@@ -0,0 +1,148 @@
+"""
+Interfaces for serializing Django objects.
+
+Usage::
+
+ from django.core import serializers
+ json = serializers.serialize("json", some_queryset)
+ objects = list(serializers.deserialize("json", json))
+
+To add your own serializers, use the SERIALIZATION_MODULES setting::
+
+ SERIALIZATION_MODULES = {
+ "csv" : "path.to.csv.serializer",
+ "txt" : "path.to.txt.serializer",
+ }
+
+"""
+
+from django.conf import settings
+from django.utils import importlib
+from django.utils import six
+from django.core.serializers.base import SerializerDoesNotExist
+
+# Built-in serializers
+BUILTIN_SERIALIZERS = {
+ "xml" : "django.core.serializers.xml_serializer",
+ "python" : "django.core.serializers.python",
+ "json" : "django.core.serializers.json",
+ "yaml" : "django.core.serializers.pyyaml",
+}
+
+_serializers = {}
+
+
class BadSerializer(object):
    """
    Placeholder serializer wrapping an exception raised at registration.

    The registry caches serializer modules; if importing one fails, this
    stub is cached in its place so the original error surfaces only when
    (and if) the serializer is actually used.
    """
    internal_use_only = False

    def __init__(self, exception):
        self.exception = exception

    def __call__(self, *args, **kwargs):
        raise self.exception
+
+
def register_serializer(format, serializer_module, serializers=None):
    """Register a new serializer.

    ``serializer_module`` should be the fully qualified module name
    for the serializer.

    If ``serializers`` is provided, the registration will be added
    to the provided dictionary.

    If ``serializers`` is not provided, the registration will be made
    directly into the global register of serializers. Adding serializers
    directly is not a thread-safe operation.
    """
    # Populate the global registry lazily on first direct registration.
    if serializers is None and not _serializers:
        _load_serializers()

    try:
        module = importlib.import_module(serializer_module)
    except ImportError as exc:
        # Cache a stub instead of failing now; using the serializer later
        # re-raises the original import error.
        stub = BadSerializer(exc)
        module = type('BadSerializerModule', (object,), {
            'Deserializer': stub,
            'Serializer': stub,
        })

    registry = _serializers if serializers is None else serializers
    registry[format] = module
+
+
def unregister_serializer(format):
    """Unregister a given serializer. This is not a thread-safe operation."""
    # Make sure the registry is populated before touching it.
    if not _serializers:
        _load_serializers()
    if format not in _serializers:
        raise SerializerDoesNotExist(format)
    del _serializers[format]
+
def get_serializer(format):
    """Return the Serializer class registered under *format*."""
    if not _serializers:
        _load_serializers()
    try:
        module = _serializers[format]
    except KeyError:
        raise SerializerDoesNotExist(format)
    return module.Serializer
+
def get_serializer_formats():
    """Return the names of every registered serialization format."""
    if not _serializers:
        _load_serializers()
    return list(_serializers)
+
def get_public_serializer_formats():
    """Return the formats whose serializers are fit for end-user use."""
    if not _serializers:
        _load_serializers()
    # Skip serializers flagged as internal-only (e.g. the "python" one).
    return [fmt for fmt, module in six.iteritems(_serializers)
            if not module.Serializer.internal_use_only]
+
def get_deserializer(format):
    """Return the Deserializer callable registered under *format*."""
    if not _serializers:
        _load_serializers()
    try:
        module = _serializers[format]
    except KeyError:
        raise SerializerDoesNotExist(format)
    return module.Deserializer
+
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects) using
    a certain serializer.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
+
def deserialize(format, stream_or_string, **options):
    """
    Deserialize a stream or a string. Returns an iterator that yields ``(obj,
    m2m_relation_dict)``, where ``obj`` is a instantiated -- but *unsaved* --
    object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
    list_of_related_objects}``.
    """
    deserializer = get_deserializer(format)
    return deserializer(stream_or_string, **options)
+
def _load_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily so
    that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _serializers
    registry = {}
    # Built-ins first, so SERIALIZATION_MODULES entries can override them.
    for fmt in BUILTIN_SERIALIZERS:
        register_serializer(fmt, BUILTIN_SERIALIZERS[fmt], registry)
    if hasattr(settings, "SERIALIZATION_MODULES"):
        for fmt in settings.SERIALIZATION_MODULES:
            register_serializer(fmt, settings.SERIALIZATION_MODULES[fmt], registry)
    # Swap the fully built dict in at the end.
    _serializers = registry
diff --git a/lib/python2.7/site-packages/django/core/serializers/base.py b/lib/python2.7/site-packages/django/core/serializers/base.py
new file mode 100644
index 0000000..cd4f7ff
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/base.py
@@ -0,0 +1,171 @@
+"""
+Module for abstract serializer/unserializer base classes.
+"""
+
+from django.db import models
+from django.utils import six
+
class SerializerDoesNotExist(KeyError):
    """The requested serializer was not found (subclasses KeyError so a
    plain mapping lookup failure can be caught the same way)."""
    pass

class SerializationError(Exception):
    """Something bad happened during serialization."""
    pass

class DeserializationError(Exception):
    """Something bad happened during deserialization."""
    pass
+
class Serializer(object):
    """
    Abstract serializer base class.

    serialize() drives the start/end/handle_* hooks in a fixed order;
    concrete serializers implement the hooks to emit their format.
    """

    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Consumes the ``stream`` (defaults to a fresh StringIO), ``fields``
        (iterable of field names to restrict output to) and
        ``use_natural_keys`` options; anything else is left on
        self.options for subclasses.
        """
        self.options = options

        self.stream = options.pop("stream", six.StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)

        self.start_serialization()
        # ``first`` stays True only while the first object is handled;
        # subclasses use it e.g. to decide where separators go.
        self.first = True
        for obj in queryset:
            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            for field in concrete_model._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        # Plain (non-relational) field.
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # Relational field: attname[:-3] strips the "_id"
                        # suffix so the check matches the declared field
                        # name (assumes attname = name + "_id").
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
            if self.first:
                self.first = False
        self.end_serialization()
        return self.getvalue()

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError

    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
+
class Deserializer(six.Iterator):
    """
    Abstract base deserializer class.

    Instances are iterators; subclasses implement __next__ to yield the
    next deserialized object from self.stream.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Wrap plain strings so subclasses always read from a stream.
        if isinstance(stream_or_string, six.string_types):
            self.stream = six.StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # hack to make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()

    def __iter__(self):
        return self

    def __next__(self):
        """Iteration iterface -- return the next item in the stream"""
        raise NotImplementedError
+
class DeserializedObject(object):
    """
    A deserialized model.

    Wraps an unsaved model instance together with the many-to-many values
    that were serialized alongside it. Call ``save()`` to persist both, or
    ``save(save_m2m=False)`` to persist only the plain fields.
    """

    def __init__(self, obj, m2m_data=None):
        self.object = obj
        self.m2m_data = m2m_data

    def __repr__(self):
        meta = self.object._meta
        return "<DeserializedObject: %s.%s(pk=%s)>" % (
            meta.app_label, meta.object_name, self.object.pk)

    def save(self, save_m2m=True, using=None):
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # raw=True is passed to any pre/post_save signals.
        models.Model.save_base(self.object, using=using, raw=True)
        if save_m2m and self.m2m_data:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)

            # prevent a second (possibly accidental) call to save() from saving
            # the m2m data twice.
            self.m2m_data = None
diff --git a/lib/python2.7/site-packages/django/core/serializers/json.py b/lib/python2.7/site-packages/django/core/serializers/json.py
new file mode 100644
index 0000000..64357bf
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/json.py
@@ -0,0 +1,108 @@
+"""
+Serialize data to/from JSON
+"""
+
+# Avoid shadowing the standard library json module
+from __future__ import absolute_import
+
+import datetime
+import decimal
+import json
+import sys
+
+from django.core.serializers.base import DeserializationError
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+from django.utils import six
+from django.utils.timezone import is_aware
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.
    """
    internal_use_only = False

    def start_serialization(self):
        # NOTE(review): version parts are compared as *strings*, so a
        # two-digit component (e.g. '10') would sort before '2' -- confirm
        # this is acceptable for the json/simplejson versions supported.
        if json.__version__.split('.') >= ['2', '1', '3']:
            # Use JS strings to represent Python Decimal instances (ticket #16850)
            self.options.update({'use_decimal': False})
        self._current = None
        # Everything left in options (minus the base serializer's own
        # stream/fields keys) is forwarded verbatim to json.dump().
        self.json_kwargs = self.options.copy()
        self.json_kwargs.pop('stream', None)
        self.json_kwargs.pop('fields', None)
        self.stream.write("[")

    def end_serialization(self):
        # Close the JSON array opened in start_serialization().
        if self.options.get("indent"):
            self.stream.write("\n")
        self.stream.write("]")
        if self.options.get("indent"):
            self.stream.write("\n")

    def end_object(self, obj):
        # self._current has the field data
        indent = self.options.get("indent")
        # Separate objects with a comma plus a space (compact output) or
        # a newline (indented output).
        if not self.first:
            self.stream.write(",")
            if not indent:
                self.stream.write(" ")
        if indent:
            self.stream.write("\n")
        json.dump(self.get_dump_object(obj), self.stream,
            cls=DjangoJSONEncoder, **self.json_kwargs)
        self._current = None

    def getvalue(self):
        # Grand-parent super: bypass PythonSerializer.getvalue(), which
        # returns the accumulated object list rather than stream contents.
        return super(PythonSerializer, self).getvalue()
+
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.

    Accepts a file-like object, bytes or text; the whole payload is parsed
    with json.loads() and handed to the Python deserializer, which yields
    DeserializedObject instances.
    """
    if not isinstance(stream_or_string, (bytes, six.string_types)):
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    try:
        objects = json.loads(stream_or_string)
        for obj in PythonDeserializer(objects, **options):
            yield obj
    except GeneratorExit:
        # Closing the generator is not an error; let it propagate untouched.
        raise
    except Exception as e:
        # Map to deserializer error
        # (re-raised with the original traceback attached).
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
+
+
class DjangoJSONEncoder(json.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time and decimal types.
    """
    def default(self, o):
        # See "Date Time String Format" in the ECMA-262 specification.
        if isinstance(o, datetime.datetime):
            r = o.isoformat()
            if o.microsecond:
                # Truncate microseconds to milliseconds, keeping any
                # trailing UTC offset.
                r = r[:23] + r[26:]
            if r.endswith('+00:00'):
                r = r[:-6] + 'Z'
            return r
        if isinstance(o, datetime.date):
            return o.isoformat()
        if isinstance(o, datetime.time):
            if is_aware(o):
                raise ValueError("JSON can't represent timezone-aware times.")
            r = o.isoformat()
            if o.microsecond:
                # Truncate microseconds to milliseconds.
                r = r[:12]
            return r
        if isinstance(o, decimal.Decimal):
            return str(o)
        return super(DjangoJSONEncoder, self).default(o)
+
+# Older, deprecated class name (for backwards compatibility purposes).
+DateTimeAwareJSONEncoder = DjangoJSONEncoder
+
diff --git a/lib/python2.7/site-packages/django/core/serializers/python.py b/lib/python2.7/site-packages/django/core/serializers/python.py
new file mode 100644
index 0000000..cdfac50
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/python.py
@@ -0,0 +1,154 @@
+"""
+A Python "serializer". Doesn't do much serializing per se -- just converts to
+and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
+other serializers.
+"""
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models, DEFAULT_DB_ALIAS
+from django.utils.encoding import smart_text, is_protected_type
+from django.utils import six
+
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects.
    """

    internal_use_only = True

    def start_serialization(self):
        # _current holds the field dict of the object being serialized;
        # objects accumulates the finished per-object dicts.
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        self._current = {}

    def end_object(self, obj):
        self.objects.append(self.get_dump_object(obj))
        self._current = None

    def get_dump_object(self, obj):
        # smart_text(..., strings_only=True) leaves primitives (e.g. None,
        # ints) untouched and stringifies everything else.
        return {
            "pk": smart_text(obj._get_pk_val(), strings_only=True),
            "model": smart_text(obj._meta),
            "fields": self._current
        }

    def handle_field(self, obj, field):
        value = field._get_val_from_obj(obj)
        # Protected types (i.e., primitives like None, numbers, dates,
        # and Decimals) are passed through as is. All other values are
        # converted to string first.
        if is_protected_type(value):
            self._current[field.name] = value
        else:
            self._current[field.name] = field.value_to_string(obj)

    def handle_fk_field(self, obj, field):
        # Prefer the related object's natural key when requested and
        # supported; otherwise emit the raw FK attribute value.
        if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
            related = getattr(obj, field.name)
            if related:
                value = related.natural_key()
            else:
                value = None
        else:
            value = getattr(obj, field.get_attname())
        self._current[field.name] = value

    def handle_m2m_field(self, obj, field):
        # Relations through an explicit (non-auto) intermediate model are
        # serialized via that model instead, so skip them here.
        if field.rel.through._meta.auto_created:
            if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
                m2m_value = lambda value: value.natural_key()
            else:
                m2m_value = lambda value: smart_text(value._get_pk_val(), strings_only=True)
            self._current[field.name] = [m2m_value(related)
                               for related in getattr(obj, field.name).iterator()]

    def getvalue(self):
        return self.objects
+
+
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)
    # When set, silently drop fields that no longer exist on the model.
    ignore = options.pop('ignorenonexistent', False)

    # Force app/model loading before lookups begin.
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname: Model._meta.pk.to_python(d.get("pk", None))}
        m2m_data = {}
        model_fields = Model._meta.get_all_field_names()

        # Handle each field
        for (field_name, field_value) in six.iteritems(d["fields"]):

            if ignore and field_name not in model_fields:
                # skip fields no longer on model
                continue

            # NOTE(review): on Python 2 ``str`` is bytes, so this decodes
            # byte strings with the configured charset; strings_only=True
            # passes primitives through unchanged.
            if isinstance(field_value, str):
                field_value = smart_text(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)

            field = Model._meta.get_field(field_name)

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                    def m2m_convert(value):
                        # A non-string iterable is a natural-key tuple;
                        # anything else is treated as a raw primary key.
                        if hasattr(value, '__iter__') and not isinstance(value, six.text_type):
                            return field.rel.to._default_manager.db_manager(db).get_by_natural_key(*value).pk
                        else:
                            return smart_text(field.rel.to._meta.pk.to_python(value))
                else:
                    m2m_convert = lambda v: smart_text(field.rel.to._meta.pk.to_python(v))
                m2m_data[field.name] = [m2m_convert(pk) for pk in field_value]

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                        if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type):
                            obj = field.rel.to._default_manager.db_manager(db).get_by_natural_key(*field_value)
                            value = getattr(obj, field.rel.field_name)
                            # If this is a natural foreign key to an object that
                            # has a FK/O2O as the foreign key, use the FK value
                            if field.rel.to._meta.pk.rel:
                                value = value.pk
                        else:
                            value = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                        data[field.attname] = value
                    else:
                        data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        # The instance is unsaved; m2m values are applied by the caller
        # via DeserializedObject.save().
        yield base.DeserializedObject(Model(**data), m2m_data)
+
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.model_name" string.
    """
    try:
        found = models.get_model(*model_identifier.split("."))
    except TypeError:
        # Wrong number of dotted parts for get_model()'s signature.
        found = None
    if found is None:
        raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
    return found
diff --git a/lib/python2.7/site-packages/django/core/serializers/pyyaml.py b/lib/python2.7/site-packages/django/core/serializers/pyyaml.py
new file mode 100644
index 0000000..478f14b
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/pyyaml.py
@@ -0,0 +1,75 @@
+"""
+YAML serializer.
+
+Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
+"""
+
+import decimal
+import yaml
+import sys
+from io import StringIO
+
+from django.db import models
+from django.core.serializers.base import DeserializationError
+from django.core.serializers.python import Serializer as PythonSerializer
+from django.core.serializers.python import Deserializer as PythonDeserializer
+from django.utils import six
+
+# Use the C (faster) implementation if possible
+try:
+ from yaml import CSafeLoader as SafeLoader
+ from yaml import CSafeDumper as SafeDumper
+except ImportError:
+ from yaml import SafeLoader, SafeDumper
+
+
class DjangoSafeDumper(SafeDumper):
    """SafeDumper that knows how to emit Decimal values."""

    def represent_decimal(self, value):
        # Decimals become plain YAML strings so the output stays inside the
        # "safe" subset (no python-specific tags).
        return self.represent_scalar('tag:yaml.org,2002:str', str(value))

DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """

    internal_use_only = False

    def handle_field(self, obj, field):
        """Serialize one scalar field, special-casing TimeField values."""
        # Base YAML cannot represent plain times (dates and datetimes are
        # fine). Since we stick to the "safe" dumper for interoperability,
        # times are flattened to strings instead of a "!!python/time" tag
        # that would break deserialization in other languages.
        value = getattr(obj, field.name)
        if isinstance(field, models.TimeField) and value is not None:
            self._current[field.name] = str(value)
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        """Dump the accumulated python objects to the stream as YAML."""
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        # Grand-parent super: deliberately skip PythonSerializer.getvalue.
        return super(PythonSerializer, self).getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    """
    data = stream_or_string
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    stream = StringIO(data) if isinstance(data, six.string_types) else data
    try:
        for obj in PythonDeserializer(yaml.load(stream, Loader=SafeLoader), **options):
            yield obj
    except GeneratorExit:
        raise
    except Exception as e:
        # Wrap any parse/conversion failure in DeserializationError while
        # preserving the original traceback.
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
diff --git a/lib/python2.7/site-packages/django/core/serializers/xml_serializer.py b/lib/python2.7/site-packages/django/core/serializers/xml_serializer.py
new file mode 100644
index 0000000..988c336
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/serializers/xml_serializer.py
@@ -0,0 +1,391 @@
+"""
+XML serializer.
+"""
+
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.core.serializers import base
+from django.db import models, DEFAULT_DB_ALIAS
+from django.utils.xmlutils import SimplerXMLGenerator
+from django.utils.encoding import smart_text
+from xml.dom import pulldom
+from xml.sax import handler
+from xml.sax.expatreader import ExpatParser as _ExpatParser
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to XML.
    """

    def indent(self, level):
        # Only emit whitespace when the caller asked for indented output.
        if self.options.get('indent', None) is not None:
            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)

    def start_serialization(self):
        """
        Start serialization -- open the XML document and the root element.
        """
        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
        self.xml.startDocument()
        self.xml.startElement("django-objects", {"version" : "1.0"})

    def end_serialization(self):
        """
        End serialization -- end the document.
        """
        self.indent(0)
        self.xml.endElement("django-objects")
        self.xml.endDocument()

    def start_object(self, obj):
        """
        Called as each object is handled.
        """
        if not hasattr(obj, "_meta"):
            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))

        self.indent(1)
        obj_pk = obj._get_pk_val()
        # Objects without a primary key value get no "pk" attribute.
        if obj_pk is None:
            attrs = {"model": smart_text(obj._meta),}
        else:
            attrs = {
                "pk": smart_text(obj._get_pk_val()),
                "model": smart_text(obj._meta),
            }

        self.xml.startElement("object", attrs)

    def end_object(self, obj):
        """
        Called after handling all fields for an object.
        """
        self.indent(1)
        self.xml.endElement("object")

    def handle_field(self, obj, field):
        """
        Called to handle each field on an object (except for ForeignKeys and
        ManyToManyFields)
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "type" : field.get_internal_type()
        })

        # Get a "string version" of the object's data.
        if getattr(obj, field.name) is not None:
            self.xml.characters(field.value_to_string(obj))
        else:
            # NULL values are marked with an explicit <None/> child element.
            self.xml.addQuickElement("None")

        self.xml.endElement("field")

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).
        """
        self._start_relational_field(field)
        related_att = getattr(obj, field.get_attname())
        if related_att is not None:
            if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
                related = getattr(obj, field.name)
                # If related object has a natural key, use it
                related = related.natural_key()
                # Iterable natural keys are rolled out as subelements
                for key_value in related:
                    self.xml.startElement("natural", {})
                    self.xml.characters(smart_text(key_value))
                    self.xml.endElement("natural")
            else:
                self.xml.characters(smart_text(related_att))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        # Only auto-created through tables are serialized as plain relations;
        # explicit through models are serialized as objects in their own right.
        if field.rel.through._meta.auto_created:
            self._start_relational_field(field)
            if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
                # If the objects in the m2m have a natural key, use it
                def handle_m2m(value):
                    natural = value.natural_key()
                    # Iterable natural keys are rolled out as subelements
                    self.xml.startElement("object", {})
                    for key_value in natural:
                        self.xml.startElement("natural", {})
                        self.xml.characters(smart_text(key_value))
                        self.xml.endElement("natural")
                    self.xml.endElement("object")
            else:
                def handle_m2m(value):
                    self.xml.addQuickElement("object", attrs={
                        'pk' : smart_text(value._get_pk_val())
                    })
            for relobj in getattr(obj, field.name).iterator():
                handle_m2m(relobj)

            self.xml.endElement("field")

    def _start_relational_field(self, field):
        """
        Helper to output the <field> element for relational fields
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "rel" : field.rel.__class__.__name__,
            "to" : smart_text(field.rel.to._meta),
        })
+
class Deserializer(base.Deserializer):
    """
    Deserialize XML.
    """

    def __init__(self, stream_or_string, **options):
        super(Deserializer, self).__init__(stream_or_string, **options)
        # Pull-parse so only one <object> subtree is in memory at a time.
        self.event_stream = pulldom.parse(self.stream, self._make_parser())
        self.db = options.pop('using', DEFAULT_DB_ALIAS)
        self.ignore = options.pop('ignorenonexistent', False)

    def _make_parser(self):
        """Create a hardened XML parser (no custom/external entities)."""
        return DefusedExpatParser()

    def __next__(self):
        # Advance to the next <object> element, expand it into a full DOM
        # subtree and deserialize that; exhaustion ends the iterator.
        for event, node in self.event_stream:
            if event == "START_ELEMENT" and node.nodeName == "object":
                self.event_stream.expandNode(node)
                return self._handle_object(node)
        raise StopIteration

    def _handle_object(self, node):
        """
        Convert an <object> node to a DeserializedObject.
        """
        # Look up the model using the model loading mechanism. If this fails,
        # bail.
        Model = self._get_model_from_node(node, "model")

        # Start building a data dictionary from the object.
        # If the node is missing the pk set it to None
        if node.hasAttribute("pk"):
            pk = node.getAttribute("pk")
        else:
            pk = None

        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}

        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})
        m2m_data = {}

        model_fields = Model._meta.get_all_field_names()
        # Deserialize each field.
        for field_node in node.getElementsByTagName("field"):
            # If the field is missing the name attribute, bail (are you
            # sensing a pattern here?)
            field_name = field_node.getAttribute("name")
            if not field_name:
                raise base.DeserializationError("<field> node is missing the 'name' attribute")

            # Get the field from the Model. This will raise a
            # FieldDoesNotExist if, well, the field doesn't exist, which will
            # be propagated correctly unless ignorenonexistent=True is used.
            if self.ignore and field_name not in model_fields:
                continue
            field = Model._meta.get_field(field_name)

            # As is usually the case, relation fields get the special treatment.
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = self._handle_fk_field_node(field_node, field)
            else:
                # A <None/> child element marks a NULL value.
                if field_node.getElementsByTagName('None'):
                    value = None
                else:
                    value = field.to_python(getInnerText(field_node).strip())
                data[field.name] = value

        # Return a DeserializedObject so that the m2m data has a place to live.
        return base.DeserializedObject(Model(**data), m2m_data)

    def _handle_fk_field_node(self, node, field):
        """
        Handle a <field> node for a ForeignKey
        """
        # Check if there is a child node named 'None', returning None if so.
        if node.getElementsByTagName('None'):
            return None
        else:
            if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                keys = node.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
                    obj_pk = getattr(obj, field.rel.field_name)
                    # If this is a natural foreign key to an object that
                    # has a FK/O2O as the foreign key, use the FK value
                    if field.rel.to._meta.pk.rel:
                        obj_pk = obj_pk.pk
                else:
                    # Otherwise, treat like a normal PK
                    field_value = getInnerText(node).strip()
                    obj_pk = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                return obj_pk
            else:
                field_value = getInnerText(node).strip()
                return field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)

    def _handle_m2m_field_node(self, node, field):
        """
        Handle a <field> node for a ManyToManyField.
        """
        if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
            def m2m_convert(n):
                keys = n.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj_pk = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
                else:
                    # Otherwise, treat like a normal PK value.
                    obj_pk = field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
                return obj_pk
        else:
            m2m_convert = lambda n: field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
        return [m2m_convert(c) for c in node.getElementsByTagName("object")]

    def _get_model_from_node(self, node, attr):
        """
        Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = node.getAttribute(attr)
        if not model_identifier:
            raise base.DeserializationError(
                "<%s> node is missing the required '%s' attribute" \
                % (node.nodeName, attr))
        try:
            Model = models.get_model(*model_identifier.split("."))
        except TypeError:
            Model = None
        if Model is None:
            raise base.DeserializationError(
                "<%s> node has invalid model identifier: '%s'" % \
                (node.nodeName, model_identifier))
        return Model
+
+
def getInnerText(node):
    """
    Get all the inner text of a DOM node (recursively).
    """
    # Collect text/CDATA data, recursing into elements; every other node
    # type (comments, PIs, ...) is ignored.
    pieces = []
    for child in node.childNodes:
        if child.nodeType in (child.TEXT_NODE, child.CDATA_SECTION_NODE):
            pieces.append(child.data)
        elif child.nodeType == child.ELEMENT_NODE:
            pieces.extend(getInnerText(child))
    return "".join(pieces)
+
+
+# Below code based on Christian Heimes' defusedxml
+
+
class DefusedExpatParser(_ExpatParser):
    """
    An expat parser hardened against XML bomb attacks.

    Forbids DTDs, external entity references

    """
    def __init__(self, *args, **kwargs):
        _ExpatParser.__init__(self, *args, **kwargs)
        # Never fetch external general/parameter entities.
        self.setFeature(handler.feature_external_ges, False)
        self.setFeature(handler.feature_external_pes, False)

    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        # Any DTD at all is rejected (guards against entity-expansion bombs).
        raise DTDForbidden(name, sysid, pubid)

    def entity_decl(self, name, is_parameter_entity, value, base,
                    sysid, pubid, notation_name):
        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)

    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        # expat 1.2
        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)

    def external_entity_ref_handler(self, context, base, sysid, pubid):
        raise ExternalReferenceForbidden(context, base, sysid, pubid)

    def reset(self):
        # reset() recreates the underlying expat parser, so the forbidding
        # handlers have to be re-attached each time.
        _ExpatParser.reset(self)
        parser = self._parser
        parser.StartDoctypeDeclHandler = self.start_doctype_decl
        parser.EntityDeclHandler = self.entity_decl
        parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
+
+
class DefusedXmlException(ValueError):
    """Common base class for the defused-XML error types below."""

    def __repr__(self):
        # Mirror str() so log output is identical either way.
        return str(self)
+
+
class DTDForbidden(DefusedXmlException):
    """Document type definition is forbidden."""

    def __init__(self, name, sysid, pubid):
        super(DTDForbidden, self).__init__()
        # Keep the offending DTD's identifying pieces for diagnostics.
        self.name = name
        self.sysid = sysid
        self.pubid = pubid

    def __str__(self):
        return "DTDForbidden(name='{}', system_id={!r}, public_id={!r})".format(
            self.name, self.sysid, self.pubid)
+
+
class EntitiesForbidden(DefusedXmlException):
    """Entity definition is forbidden."""

    def __init__(self, name, value, base, sysid, pubid, notation_name):
        super(EntitiesForbidden, self).__init__()
        # Record every piece of the forbidden entity declaration.
        self.name = name
        self.value = value
        self.base = base
        self.sysid = sysid
        self.pubid = pubid
        self.notation_name = notation_name

    def __str__(self):
        return "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})".format(
            self.name, self.sysid, self.pubid)
+
+
class ExternalReferenceForbidden(DefusedXmlException):
    """Resolving an external reference is forbidden."""

    def __init__(self, context, base, sysid, pubid):
        super(ExternalReferenceForbidden, self).__init__()
        # Record where the forbidden external reference pointed.
        self.context = context
        self.base = base
        self.sysid = sysid
        self.pubid = pubid

    def __str__(self):
        return "ExternalReferenceForbidden(system_id='{}', public_id={})".format(
            self.sysid, self.pubid)
diff --git a/lib/python2.7/site-packages/django/core/servers/__init__.py b/lib/python2.7/site-packages/django/core/servers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/servers/__init__.py
diff --git a/lib/python2.7/site-packages/django/core/servers/basehttp.py b/lib/python2.7/site-packages/django/core/servers/basehttp.py
new file mode 100644
index 0000000..010b5e0
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/servers/basehttp.py
@@ -0,0 +1,169 @@
+"""
+HTTP server that implements the Python WSGI protocol (PEP 333, rev 1.21).
+
+Based on wsgiref.simple_server which is part of the standard library since 2.5.
+
+This is a simple server for use in testing or debugging Django apps. It hasn't
+been reviewed for security issues. DON'T USE IT FOR PRODUCTION USE!
+"""
+
+from __future__ import unicode_literals
+
+from io import BytesIO
+import socket
+import sys
+import traceback
+from wsgiref import simple_server
+from wsgiref.util import FileWrapper # for backwards compatibility
+
+from django.core.management.color import color_style
+from django.core.wsgi import get_wsgi_application
+from django.utils.module_loading import import_by_path
+from django.utils import six
+from django.utils.six.moves.urllib.parse import urljoin
+from django.utils.six.moves import socketserver
+
+__all__ = ('WSGIServer', 'WSGIRequestHandler', 'MAX_SOCKET_CHUNK_SIZE')
+
+
+# If data is too large, socket will choke, so write chunks no larger than 32MB
+# at a time. The rationale behind the 32MB can be found on Django's Trac:
+# https://code.djangoproject.com/ticket/5596#comment:4
+MAX_SOCKET_CHUNK_SIZE = 32 * 1024 * 1024 # 32 MB
+
+
def get_internal_wsgi_application():
    """
    Loads and returns the WSGI application as configured by the user in
    ``settings.WSGI_APPLICATION``. With the default ``startproject`` layout,
    this will be the ``application`` object in ``projectname/wsgi.py``.

    This function, and the ``WSGI_APPLICATION`` setting itself, are only useful
    for Django's internal servers (runserver, runfcgi); external WSGI servers
    should just be configured to point to the correct application object
    directly.

    If settings.WSGI_APPLICATION is not set (is ``None``), we just return
    whatever ``django.core.wsgi.get_wsgi_application`` returns.

    """
    from django.conf import settings
    # Use an explicit default of None so a settings module that omits
    # WSGI_APPLICATION entirely takes the documented fallback path instead
    # of raising AttributeError.
    app_path = getattr(settings, 'WSGI_APPLICATION', None)
    if app_path is None:
        return get_wsgi_application()

    return import_by_path(
        app_path,
        error_prefix="WSGI application '%s' could not be loaded; " % app_path
    )
+
+
class ServerHandler(simple_server.ServerHandler, object):
    # Native str for the status line on both Python 2 and 3.
    error_status = str("500 INTERNAL SERVER ERROR")

    def write(self, data):
        """'write()' callable as specified by PEP 3333"""

        assert isinstance(data, bytes), "write() argument must be bytestring"

        if not self.status:
            raise AssertionError("write() before start_response()")

        elif not self.headers_sent:
            # Before the first output, send the stored headers
            self.bytes_sent = len(data)  # make sure we know content-length
            self.send_headers()
        else:
            self.bytes_sent += len(data)

        # XXX check Content-Length and truncate if too many bytes written?
        # Write in MAX_SOCKET_CHUNK_SIZE pieces so a very large response body
        # does not choke the socket layer (see module-level comment above).
        data = BytesIO(data)
        for chunk in iter(lambda: data.read(MAX_SOCKET_CHUNK_SIZE), b''):
            self._write(chunk)
            self._flush()

    def error_output(self, environ, start_response):
        # Append the traceback to the default error page body (debug server).
        super(ServerHandler, self).error_output(environ, start_response)
        return ['\n'.join(traceback.format_exception(*sys.exc_info()))]

    # Backport of http://hg.python.org/cpython/rev/d5af1b235dab. See #16241.
    # This can be removed when support for Python <= 2.7.3 is deprecated.
    def finish_response(self):
        try:
            if not self.result_is_file() or not self.sendfile():
                for data in self.result:
                    self.write(data)
                self.finish_content()
        finally:
            # Always close, even if iterating the result raised.
            self.close()
+
+
class WSGIServer(simple_server.WSGIServer, object):
    """BaseHTTPServer that implements the Python WSGI protocol"""

    request_queue_size = 10

    def __init__(self, *args, **kwargs):
        # Opt into IPv6 before the base class creates the listening socket.
        ipv6 = kwargs.pop('ipv6', False)
        if ipv6:
            self.address_family = socket.AF_INET6
        super(WSGIServer, self).__init__(*args, **kwargs)

    def server_bind(self):
        """Override server_bind to store the server name."""
        super(WSGIServer, self).server_bind()
        self.setup_environ()
+
+
class WSGIRequestHandler(simple_server.WSGIRequestHandler, object):
    """Request handler with colorized, noise-filtered console logging."""

    def __init__(self, *args, **kwargs):
        from django.conf import settings
        self.admin_static_prefix = urljoin(settings.STATIC_URL, 'admin/')
        # We set self.path to avoid crashes in log_message() on unsupported
        # requests (like "OPTIONS").
        self.path = ''
        self.style = color_style()
        super(WSGIRequestHandler, self).__init__(*args, **kwargs)

    def address_string(self):
        # Short-circuit parent method to not call socket.getfqdn
        return self.client_address[0]

    def log_message(self, format, *args):
        # Don't bother logging requests for admin images or the favicon.
        is_noise = (self.path.startswith(self.admin_static_prefix)
                    or self.path == '/favicon.ico')
        if is_noise:
            return

        msg = "[%s] %s\n" % (self.log_date_time_string(), format % args)

        # Utilize terminal colors, if available
        status = args[1]
        if status[0] == '2':
            # Put 2XX first, since it should be the common case
            msg = self.style.HTTP_SUCCESS(msg)
        elif status[0] == '1':
            msg = self.style.HTTP_INFO(msg)
        elif status == '304':
            msg = self.style.HTTP_NOT_MODIFIED(msg)
        elif status[0] == '3':
            msg = self.style.HTTP_REDIRECT(msg)
        elif status == '404':
            msg = self.style.HTTP_NOT_FOUND(msg)
        elif status[0] == '4':
            msg = self.style.HTTP_BAD_REQUEST(msg)
        else:
            # Any 5XX, or any other response
            msg = self.style.HTTP_SERVER_ERROR(msg)

        sys.stderr.write(msg)
+
+
def run(addr, port, wsgi_handler, ipv6=False, threading=False):
    """Serve wsgi_handler on (addr, port) until the process is stopped."""
    # Build a threaded server class on the fly when requested; str() keeps
    # the class name a native str on Python 2.
    if threading:
        server_cls = type(str('WSGIServer'), (socketserver.ThreadingMixIn, WSGIServer), {})
    else:
        server_cls = WSGIServer
    server = server_cls((addr, port), WSGIRequestHandler, ipv6=ipv6)
    server.set_app(wsgi_handler)
    server.serve_forever()
diff --git a/lib/python2.7/site-packages/django/core/servers/fastcgi.py b/lib/python2.7/site-packages/django/core/servers/fastcgi.py
new file mode 100644
index 0000000..2ae1fa5
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/servers/fastcgi.py
@@ -0,0 +1,185 @@
+"""
+FastCGI (or SCGI, or AJP1.3 ...) server that implements the WSGI protocol.
+
+Uses the flup python package: http://www.saddi.com/software/flup/
+
+This is a adaptation of the flup package to add FastCGI server support
+to run Django apps from Web servers that support the FastCGI protocol.
+This module can be run standalone or from the django-admin / manage.py
+scripts using the "runfcgi" directive.
+
+Run with the extra option "help" for a list of additional options you can
+pass to this server.
+"""
+
+import os
+import sys
+from django.utils import importlib
+
+__version__ = "0.1"
+__all__ = ["runfastcgi"]
+
# Default values for every recognized runfcgi option; overridden first by
# keyword arguments to runfastcgi() and then by "key=value" CLI settings.
FASTCGI_OPTIONS = {
    'protocol': 'fcgi',
    'host': None,
    'port': None,
    'socket': None,
    'method': 'fork',
    'daemonize': None,
    'workdir': '/',
    'pidfile': None,
    'maxspare': 5,
    'minspare': 2,
    'maxchildren': 50,
    'maxrequests': 0,
    'debug': None,
    'outlog': None,
    'errlog': None,
    'umask': None,
}

# Usage text printed by fastcgi_help(); interpolates the defaults above.
FASTCGI_HELP = r"""
  Run this project as a fastcgi (or some other protocol supported
  by flup) application. To do this, the flup package from
  http://www.saddi.com/software/flup/ is required.

   runfcgi [options] [fcgi settings]

Optional Fcgi settings: (setting=value)
  protocol=PROTOCOL    fcgi, scgi, ajp, ... (default %(protocol)s)
  host=HOSTNAME        hostname to listen on.
  port=PORTNUM         port to listen on.
  socket=FILE          UNIX socket to listen on.
  method=IMPL          prefork or threaded (default %(method)s).
  maxrequests=NUMBER   number of requests a child handles before it is
                       killed and a new child is forked (0 = no limit).
  maxspare=NUMBER      max number of spare processes / threads (default %(maxspare)s).
  minspare=NUMBER      min number of spare processes / threads (default %(minspare)s).
  maxchildren=NUMBER   hard limit number of processes / threads (default %(maxchildren)s).
  daemonize=BOOL       whether to detach from terminal.
  pidfile=FILE         write the spawned process-id to this file.
  workdir=DIRECTORY    change to this directory when daemonizing (default %(workdir)s).
  debug=BOOL           set to true to enable flup tracebacks.
  outlog=FILE          write stdout to this file.
  errlog=FILE          write stderr to this file.
  umask=UMASK          umask to use when daemonizing, in octal notation (default 022).

Examples:
  Run a "standard" fastcgi process on a file-descriptor
  (for Web servers which spawn your processes for you)
    $ manage.py runfcgi method=threaded

  Run a scgi server on a TCP host/port
    $ manage.py runfcgi protocol=scgi method=prefork host=127.0.0.1 port=8025

  Run a fastcgi server on a UNIX domain socket (posix platforms only)
    $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock

  Run a fastCGI as a daemon and write the spawned PID in a file
    $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
        daemonize=true pidfile=/var/run/django-fcgi.pid

""" % FASTCGI_OPTIONS
+
def fastcgi_help(message=None):
    """Print the runfcgi usage text plus an optional message; return False."""
    chunks = [FASTCGI_HELP]
    if message:
        chunks.append(message)
    for text in chunks:
        print(text)
    # False signals failure/help-shown to the caller.
    return False
+
def runfastcgi(argset=(), **kwargs):
    """
    Run this project under flup as a FastCGI/SCGI/AJP server.

    ``argset`` is a sequence of "key=value" strings (a bare "key" counts as
    True); ``kwargs`` provide defaults on top of FASTCGI_OPTIONS. Returns
    False on error or after printing help; otherwise blocks serving requests.

    Note: the default was a mutable ``[]``; an immutable tuple avoids the
    shared-mutable-default pitfall while accepting the same call patterns.
    """
    options = FASTCGI_OPTIONS.copy()
    options.update(kwargs)
    # Parse "key=value" settings; a bare token becomes a boolean flag.
    for x in argset:
        if "=" in x:
            k, v = x.split('=', 1)
        else:
            k, v = x, True
        options[k.lower()] = v

    if "help" in options:
        return fastcgi_help()

    try:
        import flup  # noqa: imported only to verify flup is installed
    except ImportError as e:
        sys.stderr.write("ERROR: %s\n" % e)
        sys.stderr.write(" Unable to load the flup package. In order to run django\n")
        sys.stderr.write(" as a FastCGI application, you will need to get flup from\n")
        sys.stderr.write(" http://www.saddi.com/software/flup/ If you've already\n")
        sys.stderr.write(" installed flup, then make sure you have it in your PYTHONPATH.\n")
        return False

    flup_module = 'server.' + options['protocol']

    # Map the requested concurrency model onto flup's fork/thread servers.
    if options['method'] in ('prefork', 'fork'):
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxChildren': int(options["maxchildren"]),
            'maxRequests': int(options["maxrequests"]),
        }
        flup_module += '_fork'
    elif options['method'] in ('thread', 'threaded'):
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxThreads': int(options["maxchildren"]),
        }
    else:
        return fastcgi_help("ERROR: Implementation must be one of prefork or "
                            "thread.")

    wsgi_opts['debug'] = options['debug'] is not None

    try:
        module = importlib.import_module('.%s' % flup_module, 'flup')
        WSGIServer = module.WSGIServer
    except Exception:
        print("Can't import flup." + flup_module)
        return False

    # Prep up and go
    from django.core.servers.basehttp import get_internal_wsgi_application

    # Exactly one of (host+port) or socket may be given; neither means
    # "listen on the file descriptor the web server handed us".
    if options["host"] and options["port"] and not options["socket"]:
        wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
    elif options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = options["socket"]
    elif not options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = None
    else:
        return fastcgi_help("Invalid combination of host, port, socket.")

    if options["daemonize"] is None:
        # Default to daemonizing if we're running on a socket/named pipe.
        daemonize = (wsgi_opts['bindAddress'] is not None)
    else:
        if options["daemonize"].lower() in ('true', 'yes', 't'):
            daemonize = True
        elif options["daemonize"].lower() in ('false', 'no', 'f'):
            daemonize = False
        else:
            return fastcgi_help("ERROR: Invalid option for daemonize "
                                "parameter.")

    daemon_kwargs = {}
    if options['outlog']:
        daemon_kwargs['out_log'] = options['outlog']
    if options['errlog']:
        daemon_kwargs['err_log'] = options['errlog']
    if options['umask']:
        # umask is given in octal notation, e.g. "022".
        daemon_kwargs['umask'] = int(options['umask'], 8)

    if daemonize:
        from django.utils.daemonize import become_daemon
        become_daemon(our_home_dir=options["workdir"], **daemon_kwargs)

    if options["pidfile"]:
        with open(options["pidfile"], "w") as fp:
            fp.write("%d\n" % os.getpid())

    WSGIServer(get_internal_wsgi_application(), **wsgi_opts).run()
+
if __name__ == '__main__':
    # Allow invoking this module directly; forward the CLI arguments.
    runfastcgi(sys.argv[1:])
diff --git a/lib/python2.7/site-packages/django/core/signals.py b/lib/python2.7/site-packages/django/core/signals.py
new file mode 100644
index 0000000..a14af00
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/signals.py
@@ -0,0 +1,5 @@
from django.dispatch import Signal

# Sent by the request handler immediately before/after processing a request.
request_started = Signal()
request_finished = Signal()
# Sent when an unhandled exception escapes request processing.
got_request_exception = Signal(providing_args=["request"])
diff --git a/lib/python2.7/site-packages/django/core/signing.py b/lib/python2.7/site-packages/django/core/signing.py
new file mode 100644
index 0000000..c3b2c3e
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/signing.py
@@ -0,0 +1,199 @@
+"""
+Functions for creating and restoring url-safe signed JSON objects.
+
+The format used looks like this:
+
+>>> signing.dumps("hello")
+'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'
+
+There are two components here, separated by a ':'. The first component is a
+URLsafe base64 encoded JSON of the object passed to dumps(). The second
+component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret"
+
+signing.loads(s) checks the signature and returns the deserialised object.
+If the signature fails, a BadSignature exception is raised.
+
+>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
+u'hello'
+>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified")
+...
+BadSignature: Signature failed: ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified
+
+You can optionally compress the JSON prior to base64 encoding it to save
+space, using the compress=True argument. This checks if compression actually
+helps and only applies compression if the result is a shorter string:
+
+>>> signing.dumps(range(1, 20), compress=True)
+'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'
+
+The fact that the string is compressed is signalled by the prefixed '.' at the
+start of the base64 JSON.
+
+There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
+These functions make use of all of them.
+"""
+
+from __future__ import unicode_literals
+
+import base64
+import json
+import time
+import zlib
+
+from django.conf import settings
+from django.utils import baseconv
+from django.utils.crypto import constant_time_compare, salted_hmac
+from django.utils.encoding import force_bytes, force_str, force_text
+from django.utils.module_loading import import_by_path
+
+
class BadSignature(Exception):
    """Raised when a signature does not match its payload."""
    pass
+
+
class SignatureExpired(BadSignature):
    """Raised when a timestamped signature is older than the allowed max_age."""
    pass
+
+
def b64_encode(s):
    """URL-safe base64-encode bytes, dropping the trailing '=' padding."""
    # Padding only ever appears at the end, so rstrip is sufficient.
    return base64.urlsafe_b64encode(s).rstrip(b'=')
+
+
def b64_decode(s):
    """Decode URL-safe base64 produced by b64_encode (re-adds '=' padding)."""
    padding = b'=' * (-len(s) % 4)
    return base64.urlsafe_b64decode(s + padding)
+
+
def base64_hmac(salt, value, key):
    """Return the URL-safe base64 of the salted-HMAC digest of value."""
    digest = salted_hmac(salt, value, key).digest()
    return b64_encode(digest)
+
+
def get_cookie_signer(salt='django.core.signing.get_cookie_signer'):
    """Return an instance of the configured SIGNING_BACKEND for cookies."""
    signer_cls = import_by_path(settings.SIGNING_BACKEND)
    # Key is namespaced so cookie signatures differ from other SECRET_KEY uses.
    return signer_cls('django.http.cookies' + settings.SECRET_KEY, salt=salt)
+
+
class JSONSerializer(object):
    """
    Simple wrapper around json to be used in signing.dumps and
    signing.loads.
    """

    def dumps(self, obj):
        # Compact separators keep the signed payload short; latin-1 is safe
        # because json.dumps escapes non-ASCII characters by default.
        serialized = json.dumps(obj, separators=(',', ':'))
        return serialized.encode('latin-1')

    def loads(self, data):
        return json.loads(data.decode('latin-1'))
+
+
def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, compress=False):
    """
    Returns URL-safe, sha1 signed base64 compressed JSON string. If key is
    None, settings.SECRET_KEY is used instead.

    If compress is True (not the default) checks if compressing using zlib can
    save some space. Prepends a '.' to signify compression. This is included
    in the signature, to protect against zip bombs.

    Salt can be used to namespace the hash, so that a signed string is
    only valid for a given namespace. Leaving this at the default
    value or re-using a salt value across different parts of your
    application without good cause is a security risk.

    The serializer is expected to return a bytestring.
    """
    data = serializer().dumps(obj)

    is_compressed = False
    if compress:
        # Only keep the compressed form when it actually saves space; the
        # '.' marker itself costs one byte, hence the "- 1".
        candidate = zlib.compress(data)
        if len(candidate) < (len(data) - 1):
            data = candidate
            is_compressed = True

    base64d = b64_encode(data)
    if is_compressed:
        base64d = b'.' + base64d
    return TimestampSigner(key, salt=salt).sign(base64d)
+
+
def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, max_age=None):
    """
    Reverse of dumps(), raises BadSignature if signature fails.

    The serializer is expected to accept a bytestring.
    """
    # unsign() hands back text, but base64/zlib operate on bytes.
    base64d = force_bytes(TimestampSigner(key, salt=salt).unsign(s, max_age=max_age))
    # A leading '.' marks a zlib-compressed payload.
    decompress = base64d[:1] == b'.'
    if decompress:
        base64d = base64d[1:]
    data = b64_decode(base64d)
    if decompress:
        data = zlib.decompress(data)
    return serializer().loads(data)
+
+
class Signer(object):
    """Sign and verify string values with an HMAC derived from key + salt."""

    def __init__(self, key=None, sep=':', salt=None):
        # Use of native strings in all versions of Python
        self.sep = str(sep)
        self.key = str(key or settings.SECRET_KEY)
        default_salt = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
        self.salt = str(salt or default_salt)

    def signature(self, value):
        """Return the base64 HMAC signature of value as a native str."""
        sig = base64_hmac(self.salt + 'signer', value, self.key)
        # Convert the signature from bytes to str only on Python 3
        return force_str(sig)

    def sign(self, value):
        """Append sep + signature to value."""
        value = force_str(value)
        return str('%s%s%s') % (value, self.sep, self.signature(value))

    def unsign(self, signed_value):
        """Verify signed_value; return the original value or raise BadSignature."""
        signed_value = force_str(signed_value)
        if self.sep not in signed_value:
            raise BadSignature('No "%s" found in value' % self.sep)
        value, sig = signed_value.rsplit(self.sep, 1)
        # Constant-time comparison prevents timing attacks on the signature.
        if not constant_time_compare(sig, self.signature(value)):
            raise BadSignature('Signature "%s" does not match' % sig)
        return force_text(value)
+
+
class TimestampSigner(Signer):
    """Signer that also embeds a base62 timestamp, enabling max_age checks."""

    def timestamp(self):
        # Current unix time, base62-encoded to keep the token short.
        return baseconv.base62.encode(int(time.time()))

    def sign(self, value):
        value = force_str(value)
        stamped = str('%s%s%s') % (value, self.sep, self.timestamp())
        return super(TimestampSigner, self).sign(stamped)

    def unsign(self, value, max_age=None):
        """
        Retrieve original value and check it wasn't signed more
        than max_age seconds ago.
        """
        result = super(TimestampSigner, self).unsign(value)
        value, timestamp = result.rsplit(self.sep, 1)
        timestamp = baseconv.base62.decode(timestamp)
        if max_age is not None:
            # Check timestamp is not older than max_age
            age = time.time() - timestamp
            if age > max_age:
                raise SignatureExpired(
                    'Signature age %s > %s seconds' % (age, max_age))
        return value
diff --git a/lib/python2.7/site-packages/django/core/urlresolvers.py b/lib/python2.7/site-packages/django/core/urlresolvers.py
new file mode 100644
index 0000000..2aba375
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/urlresolvers.py
@@ -0,0 +1,595 @@
+"""
+This module converts requested URLs to callback view functions.
+
+RegexURLResolver is the main class here. Its resolve() method takes a URL (as
+a string) and returns a tuple in this format:
+
+ (view_function, function_args, function_kwargs)
+"""
+from __future__ import unicode_literals
+
+import re
+from threading import local
+
+from django.http import Http404
+from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
+from django.utils.datastructures import MultiValueDict
+from django.utils.encoding import force_str, force_text, iri_to_uri
+from django.utils.functional import memoize, lazy
+from django.utils.http import urlquote
+from django.utils.importlib import import_module
+from django.utils.module_loading import module_has_submodule
+from django.utils.regex_helper import normalize
+from django.utils import six
+from django.utils.translation import get_language
+
+
+_resolver_cache = {} # Maps URLconf modules to RegexURLResolver instances.
+_ns_resolver_cache = {} # Maps namespaces to RegexURLResolver instances.
+_callable_cache = {} # Maps view and url pattern names to their view functions.
+
+# SCRIPT_NAME prefixes for each thread are stored here. If there's no entry for
+# the current thread (which is the only one we ever access), it is assumed to
+# be empty.
+_prefixes = local()
+
+# Overridden URLconfs for each thread are stored here.
+_urlconfs = local()
+
+
class ResolverMatch(object):
    """
    The result of resolving a URL: the view callable, the positional and
    keyword arguments captured from the URL, and metadata about which named
    pattern / namespace matched.
    """
    def __init__(self, func, args, kwargs, url_name=None, app_name=None, namespaces=None):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.app_name = app_name
        # Keep only non-empty namespace entries; default to an empty list.
        self.namespaces = [ns for ns in (namespaces or []) if ns]
        if not url_name:
            # Fall back to the view's dotted path. Callable class instances
            # have no __name__, so use their class instead.
            target = func if hasattr(func, '__name__') else func.__class__
            url_name = '%s.%s' % (target.__module__, target.__name__)
        self.url_name = url_name

    @property
    def namespace(self):
        """The fully-qualified namespace, e.g. 'ns1:ns2'."""
        return ':'.join(self.namespaces)

    @property
    def view_name(self):
        """The namespace-qualified URL name, e.g. 'ns1:ns2:url-name'."""
        return ':'.join(part for part in (self.namespace, self.url_name) if part)

    def __getitem__(self, index):
        # Tuple-compatibility: (func, args, kwargs) unpacking still works.
        return (self.func, self.args, self.kwargs)[index]

    def __repr__(self):
        return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name='%s', app_name='%s', namespace='%s')" % (
            self.func, self.args, self.kwargs, self.url_name, self.app_name, self.namespace)
+
class Resolver404(Http404):
    """Raised by resolve() when a path matches no URL pattern."""
    pass
+
class NoReverseMatch(Exception):
    """Raised by reverse() when no pattern matches the given name/arguments."""
    pass
+
def get_callable(lookup_view, can_fail=False):
    """
    Convert a string version of a function name to the callable object.

    If the lookup_view is not an import path, it is assumed to be a URL pattern
    label and the original string is returned.

    If can_fail is True, lookup_view might be a URL pattern label, so errors
    during the import fail and the string is returned.
    """
    if not callable(lookup_view):
        mod_name, func_name = get_mod_func(lookup_view)
        if func_name == '':
            # No dot in the string: treat it as a URL pattern label.
            return lookup_view

        try:
            mod = import_module(mod_name)
        except ImportError:
            # Distinguish "the module genuinely does not exist" (reported as
            # ViewDoesNotExist) from an ImportError raised inside an existing
            # module (re-raised as-is below).
            parentmod, submod = get_mod_func(mod_name)
            if (not can_fail and submod != '' and
                not module_has_submodule(import_module(parentmod), submod)):
                raise ViewDoesNotExist(
                    "Could not import %s. Parent module %s does not exist." %
                    (lookup_view, mod_name))
            if not can_fail:
                raise
        else:
            try:
                lookup_view = getattr(mod, func_name)
                if not callable(lookup_view):
                    raise ViewDoesNotExist(
                        "Could not import %s.%s. View is not callable." %
                        (mod_name, func_name))
            except AttributeError:
                if not can_fail:
                    raise ViewDoesNotExist(
                        "Could not import %s. View does not exist in module %s." %
                        (lookup_view, mod_name))
    return lookup_view
# Memoized on the lookup string so each view path is imported only once.
get_callable = memoize(get_callable, _callable_cache, 1)
+
def get_resolver(urlconf):
    """
    Return a RegexURLResolver for ``urlconf``, defaulting to
    settings.ROOT_URLCONF when None. Memoized per urlconf (see below).
    """
    if urlconf is None:
        from django.conf import settings
        urlconf = settings.ROOT_URLCONF
    return RegexURLResolver(r'^/', urlconf)
get_resolver = memoize(get_resolver, _resolver_cache, 1)
+
def get_ns_resolver(ns_pattern, resolver):
    """Return a memoized resolver wrapping ``resolver`` under ``ns_pattern``."""
    # Build a namespaced resolver for the given parent urlconf pattern.
    # This makes it possible to have captured parameters in the parent
    # urlconf pattern.
    ns_resolver = RegexURLResolver(ns_pattern,
                                   resolver.url_patterns)
    return RegexURLResolver(r'^/', [ns_resolver])
get_ns_resolver = memoize(get_ns_resolver, _ns_resolver_cache, 2)
+
def get_mod_func(callback):
    """
    Split a dotted path into (module path, attribute name).

    Converts 'django.views.news.stories.story_detail' to
    ('django.views.news.stories', 'story_detail'); with no dot present the
    whole string is returned as the module part, e.g. ('foo', '').
    """
    mod, dot, func = callback.rpartition('.')
    if not dot:
        # No '.' in the string: everything is the "module" part.
        return callback, ''
    return mod, func
+
class LocaleRegexProvider(object):
    """
    A mixin to provide a default regex property which can vary by active
    language.
    """
    def __init__(self, regex):
        # ``regex`` is either a plain regex string or a translatable (lazy)
        # string that resolves to a regular expression.
        self._regex = regex
        # Compiled patterns, cached per active language code.
        self._regex_dict = {}

    @property
    def regex(self):
        """
        Returns a compiled regular expression, depending upon the activated
        language-code.
        """
        language_code = get_language()
        if language_code not in self._regex_dict:
            if isinstance(self._regex, six.string_types):
                pattern = self._regex
            else:
                # Force evaluation of a lazily-translated pattern.
                pattern = force_text(self._regex)
            try:
                self._regex_dict[language_code] = re.compile(pattern, re.UNICODE)
            except re.error as e:
                raise ImproperlyConfigured(
                    '"%s" is not a valid regular expression: %s' %
                    (pattern, six.text_type(e)))
        return self._regex_dict[language_code]
+
+
class RegexURLPattern(LocaleRegexProvider):
    """
    A single URL pattern: a regex plus the view it maps to, optional extra
    kwargs (``default_args``) and an optional ``name`` used by reverse().
    """
    def __init__(self, regex, callback, default_args=None, name=None):
        LocaleRegexProvider.__init__(self, regex)
        # callback is either a string like 'foo.views.news.stories.story_detail'
        # which represents the path to a module and a view function name, or a
        # callable object (view).
        if callable(callback):
            self._callback = callback
        else:
            self._callback = None
            self._callback_str = callback
        self.default_args = default_args or {}
        self.name = name

    def __repr__(self):
        return force_str('<%s %s %s>' % (self.__class__.__name__, self.name, self.regex.pattern))

    def add_prefix(self, prefix):
        """
        Adds the prefix string to a string-based callback.
        """
        if not prefix or not hasattr(self, '_callback_str'):
            return
        self._callback_str = prefix + '.' + self._callback_str

    def resolve(self, path):
        # Returns a ResolverMatch on success; implicitly returns None when
        # the regex does not match (the caller then tries the next pattern).
        match = self.regex.search(path)
        if match:
            # If there are any named groups, use those as kwargs, ignoring
            # non-named groups. Otherwise, pass all non-named arguments as
            # positional arguments.
            kwargs = match.groupdict()
            if kwargs:
                args = ()
            else:
                args = match.groups()
            # In both cases, pass any extra_kwargs as **kwargs.
            kwargs.update(self.default_args)

            return ResolverMatch(self.callback, args, kwargs, self.name)

    @property
    def callback(self):
        # Import a string-based callback lazily on first access, then cache.
        if self._callback is not None:
            return self._callback

        self._callback = get_callable(self._callback_str)
        return self._callback
+
class RegexURLResolver(LocaleRegexProvider):
    """
    Resolves a URL path to a view by matching a prefix regex and delegating
    to the patterns of a URLconf; also supports the inverse operation
    (reverse) through lazily-built, per-language lookup tables.
    """
    def __init__(self, regex, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
        LocaleRegexProvider.__init__(self, regex)
        # urlconf_name is a string representing the module containing URLconfs.
        self.urlconf_name = urlconf_name
        if not isinstance(urlconf_name, six.string_types):
            self._urlconf_module = self.urlconf_name
        self.callback = None
        self.default_kwargs = default_kwargs or {}
        self.namespace = namespace
        self.app_name = app_name
        # Lazily-built, per-language-code lookup tables (see _populate).
        self._reverse_dict = {}
        self._namespace_dict = {}
        self._app_dict = {}
        # set of dotted paths to all functions and classes that are used in
        # urlpatterns
        self._callback_strs = set()
        self._populated = False

    def __repr__(self):
        if isinstance(self.urlconf_name, list) and len(self.urlconf_name):
            # Don't bother to output the whole list, it can be huge
            urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__
        else:
            urlconf_repr = repr(self.urlconf_name)
        return str('<%s %s (%s:%s) %s>') % (
            self.__class__.__name__, urlconf_repr, self.app_name,
            self.namespace, self.regex.pattern)

    def _populate(self):
        """
        Build the reverse-lookup tables for the active language:
        name/callback -> match possibilities (reverse_dict),
        namespace -> (prefix, sub-resolver) (namespace_dict), and
        app_name -> list of namespaces (app_dict).
        """
        lookups = MultiValueDict()
        namespaces = {}
        apps = {}
        language_code = get_language()
        for pattern in reversed(self.url_patterns):
            # Record the dotted path of every view used in the urlpatterns.
            if hasattr(pattern, '_callback_str'):
                self._callback_strs.add(pattern._callback_str)
            elif hasattr(pattern, '_callback'):
                callback = pattern._callback
                if not hasattr(callback, '__name__'):
                    # An instance of a callable class.
                    lookup_str = callback.__module__ + "." + callback.__class__.__name__
                else:
                    lookup_str = callback.__module__ + "." + callback.__name__
                self._callback_strs.add(lookup_str)
            p_pattern = pattern.regex.pattern
            if p_pattern.startswith('^'):
                p_pattern = p_pattern[1:]
            if isinstance(pattern, RegexURLResolver):
                if pattern.namespace:
                    namespaces[pattern.namespace] = (p_pattern, pattern)
                    if pattern.app_name:
                        apps.setdefault(pattern.app_name, []).append(pattern.namespace)
                else:
                    # Un-namespaced include: fold the sub-resolver's tables
                    # into this resolver's, prefixed with its pattern.
                    parent = normalize(pattern.regex.pattern)
                    for name in pattern.reverse_dict:
                        for matches, pat, defaults in pattern.reverse_dict.getlist(name):
                            new_matches = []
                            for piece, p_args in parent:
                                new_matches.extend([(piece + suffix, p_args + args) for (suffix, args) in matches])
                            lookups.appendlist(name, (new_matches, p_pattern + pat, dict(defaults, **pattern.default_kwargs)))
                    for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
                        namespaces[namespace] = (p_pattern + prefix, sub_pattern)
                    for app_name, namespace_list in pattern.app_dict.items():
                        apps.setdefault(app_name, []).extend(namespace_list)
                    self._callback_strs.update(pattern._callback_strs)
            else:
                # Leaf pattern: index it by both callback and (if set) name.
                bits = normalize(p_pattern)
                lookups.appendlist(pattern.callback, (bits, p_pattern, pattern.default_args))
                if pattern.name is not None:
                    lookups.appendlist(pattern.name, (bits, p_pattern, pattern.default_args))
        self._reverse_dict[language_code] = lookups
        self._namespace_dict[language_code] = namespaces
        self._app_dict[language_code] = apps
        self._populated = True

    @property
    def reverse_dict(self):
        # Tables are keyed per language code; build lazily on first access.
        language_code = get_language()
        if language_code not in self._reverse_dict:
            self._populate()
        return self._reverse_dict[language_code]

    @property
    def namespace_dict(self):
        language_code = get_language()
        if language_code not in self._namespace_dict:
            self._populate()
        return self._namespace_dict[language_code]

    @property
    def app_dict(self):
        language_code = get_language()
        if language_code not in self._app_dict:
            self._populate()
        return self._app_dict[language_code]

    def resolve(self, path):
        """
        Match ``path`` against this resolver's prefix, then try each pattern
        against the remainder; return a ResolverMatch or raise Resolver404
        carrying the list of patterns tried.
        """
        tried = []
        match = self.regex.search(path)
        if match:
            new_path = path[match.end():]
            for pattern in self.url_patterns:
                try:
                    sub_match = pattern.resolve(new_path)
                except Resolver404 as e:
                    sub_tried = e.args[0].get('tried')
                    if sub_tried is not None:
                        tried.extend([[pattern] + t for t in sub_tried])
                    else:
                        tried.append([pattern])
                else:
                    if sub_match:
                        # Merge kwargs: this resolver's captures and
                        # default_kwargs are overridden by the sub-match's.
                        sub_match_dict = dict(match.groupdict(), **self.default_kwargs)
                        sub_match_dict.update(sub_match.kwargs)
                        return ResolverMatch(sub_match.func, sub_match.args, sub_match_dict, sub_match.url_name, self.app_name or sub_match.app_name, [self.namespace] + sub_match.namespaces)
                    tried.append([pattern])
            raise Resolver404({'tried': tried, 'path': new_path})
        raise Resolver404({'path' : path})

    @property
    def urlconf_module(self):
        # Import the URLconf module lazily on first access and cache it.
        try:
            return self._urlconf_module
        except AttributeError:
            self._urlconf_module = import_module(self.urlconf_name)
            return self._urlconf_module

    @property
    def url_patterns(self):
        patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
        try:
            iter(patterns)
        except TypeError:
            raise ImproperlyConfigured("The included urlconf %s doesn't have any patterns in it" % self.urlconf_name)
        return patterns

    def _resolve_special(self, view_type):
        """Return (callback, kwargs) for the handler<view_type> error view."""
        callback = getattr(self.urlconf_module, 'handler%s' % view_type, None)
        if not callback:
            # No handler specified in file; use default
            # Lazy import, since django.urls imports this file
            from django.conf import urls
            callback = getattr(urls, 'handler%s' % view_type)
        return get_callable(callback), {}

    def resolve400(self):
        return self._resolve_special('400')

    def resolve403(self):
        return self._resolve_special('403')

    def resolve404(self):
        return self._resolve_special('404')

    def resolve500(self):
        return self._resolve_special('500')

    def reverse(self, lookup_view, *args, **kwargs):
        return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)

    def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
        """
        Build a URL for ``lookup_view`` (a pattern name, dotted path or
        callable), trying each candidate pattern until one accepts the given
        args or kwargs; raises NoReverseMatch when none does.
        """
        if args and kwargs:
            raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
        text_args = [force_text(v) for v in args]
        text_kwargs = dict((k, force_text(v)) for (k, v) in kwargs.items())

        if not self._populated:
            self._populate()

        try:
            if lookup_view in self._callback_strs:
                lookup_view = get_callable(lookup_view, True)
        except (ImportError, AttributeError) as e:
            raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e))
        possibilities = self.reverse_dict.getlist(lookup_view)

        prefix_norm, prefix_args = normalize(urlquote(_prefix))[0]
        for possibility, pattern, defaults in possibilities:
            for result, params in possibility:
                if args:
                    # Positional call: argument count must match exactly.
                    if len(args) != len(params) + len(prefix_args):
                        continue
                    candidate_subs = dict(zip(prefix_args + params, text_args))
                else:
                    # Keyword call: the supplied kwargs plus defaults must
                    # cover exactly the pattern's parameters.
                    if set(kwargs.keys()) | set(defaults.keys()) != set(params) | set(defaults.keys()) | set(prefix_args):
                        continue
                    matches = True
                    for k, v in defaults.items():
                        if kwargs.get(k, v) != v:
                            matches = False
                            break
                    if not matches:
                        continue
                    candidate_subs = text_kwargs
                # WSGI provides decoded URLs, without %xx escapes, and the URL
                # resolver operates on such URLs. First substitute arguments
                # without quoting to build a decoded URL and look for a match.
                # Then, if we have a match, redo the substitution with quoted
                # arguments in order to return a properly encoded URL.
                candidate_pat = prefix_norm.replace('%', '%%') + result
                if re.search('^%s%s' % (prefix_norm, pattern), candidate_pat % candidate_subs, re.UNICODE):
                    candidate_subs = dict((k, urlquote(v)) for (k, v) in candidate_subs.items())
                    return candidate_pat % candidate_subs
        # lookup_view can be URL label, or dotted path, or callable, Any of
        # these can be passed in at the top, but callables are not friendly in
        # error messages.
        m = getattr(lookup_view, '__module__', None)
        n = getattr(lookup_view, '__name__', None)
        if m is not None and n is not None:
            lookup_view_s = "%s.%s" % (m, n)
        else:
            lookup_view_s = lookup_view

        patterns = [pattern for (possibility, pattern, defaults) in possibilities]
        raise NoReverseMatch("Reverse for '%s' with arguments '%s' and keyword "
                "arguments '%s' not found. %d pattern(s) tried: %s" %
                (lookup_view_s, args, kwargs, len(patterns), patterns))
+
class LocaleRegexURLResolver(RegexURLResolver):
    """
    A URL resolver that always matches the active language code as URL prefix.

    Rather than taking a regex argument, we just override the ``regex``
    function to always return the active language-code as regex.
    """
    def __init__(self, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
        super(LocaleRegexURLResolver, self).__init__(
            None, urlconf_name, default_kwargs, app_name, namespace)

    @property
    def regex(self):
        # Compile (and cache) a '^<lang>/' prefix pattern per language code.
        language_code = get_language()
        try:
            return self._regex_dict[language_code]
        except KeyError:
            compiled = re.compile('^%s/' % language_code, re.UNICODE)
            self._regex_dict[language_code] = compiled
            return compiled
+
def resolve(path, urlconf=None):
    """
    Resolve ``path`` against ``urlconf`` (defaulting to the thread's active
    URLconf) and return a ResolverMatch; raises Resolver404 on no match.
    """
    if urlconf is None:
        urlconf = get_urlconf()
    return get_resolver(urlconf).resolve(path)
+
def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
    """
    Return the URL path for ``viewname`` (a pattern name, possibly qualified
    with namespaces as 'ns1:ns2:name', a dotted path, or a callable),
    substituting ``args`` or ``kwargs`` into the pattern's captures.

    Raises NoReverseMatch when the namespace path cannot be resolved or no
    pattern accepts the given arguments.
    """
    if urlconf is None:
        urlconf = get_urlconf()
    resolver = get_resolver(urlconf)
    args = args or []
    kwargs = kwargs or {}

    if prefix is None:
        prefix = get_script_prefix()

    if not isinstance(viewname, six.string_types):
        view = viewname
    else:
        # Split 'ns1:ns2:name' and walk the namespaces right-to-left.
        parts = viewname.split(':')
        parts.reverse()
        view = parts[0]
        path = parts[1:]

        resolved_path = []
        ns_pattern = ''
        while path:
            ns = path.pop()

            # Lookup the name to see if it could be an app identifier
            try:
                app_list = resolver.app_dict[ns]
                # Yes! Path part matches an app in the current Resolver
                if current_app and current_app in app_list:
                    # If we are reversing for a particular app,
                    # use that namespace
                    ns = current_app
                elif ns not in app_list:
                    # The name isn't shared by one of the instances
                    # (i.e., the default) so just pick the first instance
                    # as the default.
                    ns = app_list[0]
            except KeyError:
                pass

            try:
                extra, resolver = resolver.namespace_dict[ns]
                resolved_path.append(ns)
                ns_pattern = ns_pattern + extra
            except KeyError as key:
                if resolved_path:
                    raise NoReverseMatch(
                        "%s is not a registered namespace inside '%s'" %
                        (key, ':'.join(resolved_path)))
                else:
                    raise NoReverseMatch("%s is not a registered namespace" %
                                         key)
        if ns_pattern:
            resolver = get_ns_resolver(ns_pattern, resolver)

    return iri_to_uri(resolver._reverse_with_prefix(view, prefix, *args, **kwargs))
+
# Lazily-evaluated reverse(): the actual lookup is deferred (via ``lazy``)
# until the result is needed as a string.
reverse_lazy = lazy(reverse, str)
+
def clear_url_caches():
    """
    Empty the module-level resolver, namespace-resolver and callable caches.

    The caches are only mutated in place (``dict.clear``), never rebound, so
    the original ``global`` declarations were unnecessary and are removed.
    """
    _resolver_cache.clear()
    _ns_resolver_cache.clear()
    _callable_cache.clear()
+
def set_script_prefix(prefix):
    """
    Sets the script prefix for the current thread.
    """
    # Normalize to a trailing slash before storing in the thread-local.
    _prefixes.value = prefix if prefix.endswith('/') else prefix + '/'
+
def get_script_prefix():
    """
    Returns the currently active script prefix. Useful for client code that
    wishes to construct their own URLs manually (although accessing the request
    instance is normally going to be a lot cleaner).
    """
    # Defaults to '/' when no prefix has been set for this thread.
    return getattr(_prefixes, "value", '/')
+
def clear_script_prefix():
    """
    Unsets the script prefix for the current thread.
    """
    # A thread that never set a prefix simply has no 'value' attribute.
    if hasattr(_prefixes, 'value'):
        del _prefixes.value
+
def set_urlconf(urlconf_name):
    """
    Sets the URLconf for the current thread (overriding the default one in
    settings). Set to None to revert back to the default.
    """
    if urlconf_name:
        _urlconfs.value = urlconf_name
    elif hasattr(_urlconfs, "value"):
        # Falsy argument: drop any thread-local override.
        del _urlconfs.value
+
def get_urlconf(default=None):
    """
    Returns the root URLconf to use for the current thread if it has been
    changed from the default one.
    """
    # ``default`` (normally None) is returned when no override is set.
    return getattr(_urlconfs, "value", default)
+
def is_valid_path(path, urlconf=None):
    """
    Returns True if the given path resolves against the default URL resolver,
    False otherwise.

    This is a convenience method to make working with "is this a match?" cases
    easier, avoiding unnecessarily indented try...except blocks.
    """
    try:
        resolve(path, urlconf)
        return True
    except Resolver404:
        # Only "no match" is swallowed; other errors propagate to the caller.
        return False
diff --git a/lib/python2.7/site-packages/django/core/validators.py b/lib/python2.7/site-packages/django/core/validators.py
new file mode 100644
index 0000000..78716cc
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/validators.py
@@ -0,0 +1,221 @@
+from __future__ import unicode_literals
+
+import re
+
+from django.core.exceptions import ValidationError
+from django.utils.translation import ugettext_lazy as _, ungettext_lazy
+from django.utils.encoding import force_text
+from django.utils.ipv6 import is_valid_ipv6_address
+from django.utils import six
+from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
+
+
+# These values, if given to validate(), will trigger the self.required check.
+EMPTY_VALUES = (None, '', [], (), {})
+
+
class RegexValidator(object):
    """
    Callable validator that raises ValidationError unless the value matches
    ``regex`` (searched, not anchored, unless the pattern anchors itself).
    """
    regex = ''
    message = _('Enter a valid value.')
    code = 'invalid'

    def __init__(self, regex=None, message=None, code=None):
        # Override the class-level defaults only when explicitly given.
        if regex is not None:
            self.regex = regex
        if message is not None:
            self.message = message
        if code is not None:
            self.code = code
        # Compile the regex if it was not passed pre-compiled.
        if isinstance(self.regex, six.string_types):
            self.regex = re.compile(self.regex)

    def __call__(self, value):
        """Raise ValidationError when ``value`` does not match the regex."""
        if self.regex.search(force_text(value)) is None:
            raise ValidationError(self.message, code=self.code)
+
+
class URLValidator(RegexValidator):
    """
    Validate that a value is a syntactically plausible http(s)/ftp(s) URL.

    When the plain regex check fails, the domain part is re-encoded with
    IDNA (IDN -> ACE) and validated again, so internationalized domain
    names are accepted.
    """
    regex = re.compile(
        r'^(?:http|ftp)s?://' # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
        r'localhost|' # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
        r'(?::\d+)?' # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    message = _('Enter a valid URL.')

    def __call__(self, value):
        try:
            super(URLValidator, self).__call__(value)
        except ValidationError as e:
            # Trivial case failed. Try for possible IDN domain
            if value:
                value = force_text(value)
                scheme, netloc, path, query, fragment = urlsplit(value)
                try:
                    netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE
                except UnicodeError: # invalid domain part
                    # Re-raise the original ValidationError (not the
                    # UnicodeError) so callers always see a ValidationError.
                    raise e
                url = urlunsplit((scheme, netloc, path, query, fragment))
                super(URLValidator, self).__call__(url)
            else:
                raise
        # NOTE: the original ended with ``else: url = value`` -- a dead
        # assignment (``url`` was never read afterwards); it is removed here.
+
+
def validate_integer(value):
    """Raise ValidationError unless ``value`` can be converted with int()."""
    try:
        int(value)
    except (TypeError, ValueError):
        raise ValidationError(_('Enter a valid integer.'), code='invalid')
+
+
class EmailValidator(object):
    """
    Validate an email address: the local part against ``user_regex`` and the
    domain against ``domain_regex`` (or ``domain_whitelist``), with an IDNA
    re-encoding fallback for internationalized domain parts.
    """
    message = _('Enter a valid email address.')
    code = 'invalid'
    user_regex = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*$" # dot-atom
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"$)', # quoted-string
        re.IGNORECASE)
    domain_regex = re.compile(
        r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}|[A-Z0-9-]{2,})\.?$' # domain
        # literal form, ipv4 address (SMTP 4.1.3)
        r'|^\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$',
        re.IGNORECASE)
    domain_whitelist = ['localhost']

    def __init__(self, message=None, code=None, whitelist=None):
        # Override class-level defaults only when explicitly provided.
        if message is not None:
            self.message = message
        if code is not None:
            self.code = code
        if whitelist is not None:
            self.domain_whitelist = whitelist

    def __call__(self, value):
        value = force_text(value)

        if not value or '@' not in value:
            raise ValidationError(self.message, code=self.code)

        # Split on the LAST '@' so quoted local parts containing '@' work.
        user_part, domain_part = value.rsplit('@', 1)

        if not self.user_regex.match(user_part):
            raise ValidationError(self.message, code=self.code)

        if (not domain_part in self.domain_whitelist and
            not self.domain_regex.match(domain_part)):
            # Try for possible IDN domain-part
            try:
                domain_part = domain_part.encode('idna').decode('ascii')
                if not self.domain_regex.match(domain_part):
                    raise ValidationError(self.message, code=self.code)
                else:
                    return
            except UnicodeError:
                pass
            raise ValidationError(self.message, code=self.code)
+
# Ready-to-use validator instances built from the classes above.
validate_email = EmailValidator()

slug_re = re.compile(r'^[-a-zA-Z0-9_]+$')
validate_slug = RegexValidator(slug_re, _("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."), 'invalid')

ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
validate_ipv4_address = RegexValidator(ipv4_re, _('Enter a valid IPv4 address.'), 'invalid')
+
+
def validate_ipv6_address(value):
    """Raise ValidationError unless ``value`` is a valid IPv6 address."""
    if not is_valid_ipv6_address(value):
        raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid')
+
+
def validate_ipv46_address(value):
    """Accept a value that is either a valid IPv4 or a valid IPv6 address."""
    try:
        validate_ipv4_address(value)
    except ValidationError:
        # Not IPv4 -- fall back to IPv6 before rejecting.
        try:
            validate_ipv6_address(value)
        except ValidationError:
            raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
+
# Maps a protocol keyword to a (list of validators, error message) pair;
# consumed by ip_address_validators() below.
ip_address_validator_map = {
    'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')),
    'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')),
    'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')),
}
+
+
def ip_address_validators(protocol, unpack_ipv4):
    """
    Depending on the given parameters returns the appropriate validators for
    the GenericIPAddressField.

    This code is here, because it is exactly the same for the model and the form field.
    """
    # ``unpack_ipv4`` is only permitted together with protocol='both'.
    if protocol != 'both' and unpack_ipv4:
        raise ValueError(
            "You can only use `unpack_ipv4` if `protocol` is set to 'both'")
    try:
        return ip_address_validator_map[protocol.lower()]
    except KeyError:
        raise ValueError("The protocol '%s' is unknown. Supported: %s"
                         % (protocol, list(ip_address_validator_map)))
+
# Raw string literal: the original plain string contained '\d', an invalid
# escape sequence that triggers a DeprecationWarning/SyntaxWarning on modern
# Python. The compiled pattern is identical.
comma_separated_int_list_re = re.compile(r'^[\d,]+$')
validate_comma_separated_integer_list = RegexValidator(comma_separated_int_list_re, _('Enter only digits separated by commas.'), 'invalid')
+
+
class BaseValidator(object):
    """
    Base class for validators that compare a cleaned value to a limit.

    Subclasses override ``compare`` (and optionally ``clean``) together with
    the ``message``/``code`` attributes.
    """
    message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).')
    code = 'limit_value'

    def __init__(self, limit_value):
        self.limit_value = limit_value

    # Proper methods instead of lambda assignments (PEP 8 E731); behavior
    # is identical and subclasses may still override either hook.
    def compare(self, a, b):
        # Deliberately an identity check: the base default flags any value
        # that is not the very same object as the limit.
        return a is not b

    def clean(self, x):
        return x

    def __call__(self, value):
        cleaned = self.clean(value)
        params = {'limit_value': self.limit_value, 'show_value': cleaned}
        if self.compare(cleaned, self.limit_value):
            raise ValidationError(self.message, code=self.code, params=params)
+
+
class MaxValueValidator(BaseValidator):
    """Raise ValidationError when the value is greater than ``limit_value``."""
    message = _('Ensure this value is less than or equal to %(limit_value)s.')
    code = 'max_value'

    # Method instead of a lambda assignment (PEP 8 E731); behavior unchanged.
    def compare(self, a, b):
        return a > b
+
+
class MinValueValidator(BaseValidator):
    """Raise ValidationError when the value is smaller than ``limit_value``."""
    message = _('Ensure this value is greater than or equal to %(limit_value)s.')
    code = 'min_value'

    # Method instead of a lambda assignment (PEP 8 E731); behavior unchanged.
    def compare(self, a, b):
        return a < b
+
+
class MinLengthValidator(BaseValidator):
    """Raise ValidationError when len(value) is smaller than ``limit_value``."""
    message = ungettext_lazy(
        'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).',
        'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).',
        'limit_value')
    code = 'min_length'

    # Methods instead of lambda assignments (PEP 8 E731); behavior unchanged.
    def compare(self, a, b):
        return a < b

    def clean(self, x):
        return len(x)
+
+
class MaxLengthValidator(BaseValidator):
    """Raise ValidationError when len(value) is greater than ``limit_value``."""
    message = ungettext_lazy(
        'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).',
        'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).',
        'limit_value')
    code = 'max_length'

    # Methods instead of lambda assignments (PEP 8 E731); behavior unchanged.
    def compare(self, a, b):
        return a > b

    def clean(self, x):
        return len(x)
diff --git a/lib/python2.7/site-packages/django/core/wsgi.py b/lib/python2.7/site-packages/django/core/wsgi.py
new file mode 100644
index 0000000..edea333
--- /dev/null
+++ b/lib/python2.7/site-packages/django/core/wsgi.py
@@ -0,0 +1,13 @@
+from django.core.handlers.wsgi import WSGIHandler
+
+
def get_wsgi_application():
    """
    The public interface to Django's WSGI support. Should return a WSGI
    callable.

    Allows us to avoid making django.core.handlers.WSGIHandler public API, in
    case the internal WSGI implementation changes or moves in the future.

    """
    # Each call constructs a fresh handler instance; servers typically call
    # this once at startup and reuse the returned callable.
    return WSGIHandler()