Diffstat (limited to 'lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2')
7 files changed, 0 insertions, 660 deletions
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/__init__.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/__init__.py
+++ /dev/null
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/base.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/base.py
deleted file mode 100644
index 9aa8b47..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/base.py
+++ /dev/null
@@ -1,184 +0,0 @@
-"""
-PostgreSQL database backend for Django.
-
-Requires psycopg 2: http://initd.org/projects/psycopg2
-"""
-
-import sys
-
-from django.db.backends import *
-from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
-from django.db.backends.postgresql_psycopg2.client import DatabaseClient
-from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
-from django.db.backends.postgresql_psycopg2.version import get_version
-from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
-from django.utils.encoding import force_str
-from django.utils.functional import cached_property
-from django.utils.safestring import SafeText, SafeBytes
-from django.utils.timezone import utc
-
-try:
-    import psycopg2 as Database
-    import psycopg2.extensions
-except ImportError as e:
-    from django.core.exceptions import ImproperlyConfigured
-    raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
-
-DatabaseError = Database.DatabaseError
-IntegrityError = Database.IntegrityError
-
-psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
-psycopg2.extensions.register_adapter(SafeBytes, psycopg2.extensions.QuotedString)
-psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
-
-def utc_tzinfo_factory(offset):
-    if offset != 0:
-        raise AssertionError("database connection isn't set to UTC")
-    return utc
-
-class DatabaseFeatures(BaseDatabaseFeatures):
-    needs_datetime_string_cast = False
-    can_return_id_from_insert = True
-    requires_rollback_on_dirty_transaction = True
-    has_real_datatype = True
-    can_defer_constraint_checks = True
-    has_select_for_update = True
-    has_select_for_update_nowait = True
-    has_bulk_insert = True
-    uses_savepoints = True
-    supports_tablespaces = True
-    supports_transactions = True
-    can_distinct_on_fields = True
-
-class DatabaseWrapper(BaseDatabaseWrapper):
-    vendor = 'postgresql'
-    operators = {
-        'exact': '= %s',
-        'iexact': '= UPPER(%s)',
-        'contains': 'LIKE %s',
-        'icontains': 'LIKE UPPER(%s)',
-        'regex': '~ %s',
-        'iregex': '~* %s',
-        'gt': '> %s',
-        'gte': '>= %s',
-        'lt': '< %s',
-        'lte': '<= %s',
-        'startswith': 'LIKE %s',
-        'endswith': 'LIKE %s',
-        'istartswith': 'LIKE UPPER(%s)',
-        'iendswith': 'LIKE UPPER(%s)',
-    }
-
-    Database = Database
-
-    def __init__(self, *args, **kwargs):
-        super(DatabaseWrapper, self).__init__(*args, **kwargs)
-
-        opts = self.settings_dict["OPTIONS"]
-        RC = psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED
-        self.isolation_level = opts.get('isolation_level', RC)
-
-        self.features = DatabaseFeatures(self)
-        self.ops = DatabaseOperations(self)
-        self.client = DatabaseClient(self)
-        self.creation = DatabaseCreation(self)
-        self.introspection = DatabaseIntrospection(self)
-        self.validation = BaseDatabaseValidation(self)
-
-    def get_connection_params(self):
-        settings_dict = self.settings_dict
-        if not settings_dict['NAME']:
-            from django.core.exceptions import ImproperlyConfigured
-            raise ImproperlyConfigured(
-                "settings.DATABASES is improperly configured. "
-                "Please supply the NAME value.")
-        conn_params = {
-            'database': settings_dict['NAME'],
-        }
-        conn_params.update(settings_dict['OPTIONS'])
-        if 'autocommit' in conn_params:
-            del conn_params['autocommit']
-        if 'isolation_level' in conn_params:
-            del conn_params['isolation_level']
-        if settings_dict['USER']:
-            conn_params['user'] = settings_dict['USER']
-        if settings_dict['PASSWORD']:
-            conn_params['password'] = force_str(settings_dict['PASSWORD'])
-        if settings_dict['HOST']:
-            conn_params['host'] = settings_dict['HOST']
-        if settings_dict['PORT']:
-            conn_params['port'] = settings_dict['PORT']
-        return conn_params
-
-    def get_new_connection(self, conn_params):
-        return Database.connect(**conn_params)
-
-    def init_connection_state(self):
-        settings_dict = self.settings_dict
-        self.connection.set_client_encoding('UTF8')
-        tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
-        if tz:
-            try:
-                get_parameter_status = self.connection.get_parameter_status
-            except AttributeError:
-                # psycopg2 < 2.0.12 doesn't have get_parameter_status
-                conn_tz = None
-            else:
-                conn_tz = get_parameter_status('TimeZone')
-
-            if conn_tz != tz:
-                self.connection.cursor().execute(
-                    self.ops.set_time_zone_sql(), [tz])
-                # Commit after setting the time zone (see #17062)
-                self.connection.commit()
-        self.connection.set_isolation_level(self.isolation_level)
-
-    def create_cursor(self):
-        cursor = self.connection.cursor()
-        cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
-        return cursor
-
-    def _set_isolation_level(self, isolation_level):
-        assert isolation_level in range(1, 5)  # Use set_autocommit for level = 0
-        if self.psycopg2_version >= (2, 4, 2):
-            self.connection.set_session(isolation_level=isolation_level)
-        else:
-            self.connection.set_isolation_level(isolation_level)
-
-    def _set_autocommit(self, autocommit):
-        with self.wrap_database_errors:
-            if self.psycopg2_version >= (2, 4, 2):
-                self.connection.autocommit = autocommit
-            else:
-                if autocommit:
-                    level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
-                else:
-                    level = self.isolation_level
-                self.connection.set_isolation_level(level)
-
-    def check_constraints(self, table_names=None):
-        """
-        To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they
-        are returned to deferred.
-        """
-        self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
-        self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
-
-    def is_usable(self):
-        try:
-            # Use a psycopg cursor directly, bypassing Django's utilities.
-            self.connection.cursor().execute("SELECT 1")
-        except Database.Error:
-            return False
-        else:
-            return True
-
-    @cached_property
-    def psycopg2_version(self):
-        version = psycopg2.__version__.split(' ', 1)[0]
-        return tuple(int(v) for v in version.split('.'))
-
-    @cached_property
-    def pg_version(self):
-        with self.temporary_connection():
-            return get_version(self.connection)
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/client.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/client.py
deleted file mode 100644
index a5c0296..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/client.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-import sys
-
-from django.db.backends import BaseDatabaseClient
-
-class DatabaseClient(BaseDatabaseClient):
-    executable_name = 'psql'
-
-    def runshell(self):
-        settings_dict = self.connection.settings_dict
-        args = [self.executable_name]
-        if settings_dict['USER']:
-            args += ["-U", settings_dict['USER']]
-        if settings_dict['HOST']:
-            args.extend(["-h", settings_dict['HOST']])
-        if settings_dict['PORT']:
-            args.extend(["-p", str(settings_dict['PORT'])])
-        args += [settings_dict['NAME']]
-        if os.name == 'nt':
-            sys.exit(os.system(" ".join(args)))
-        else:
-            os.execvp(self.executable_name, args)
-
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/creation.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/creation.py
deleted file mode 100644
index d4260e0..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/creation.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from django.db.backends.creation import BaseDatabaseCreation
-from django.db.backends.util import truncate_name
-
-
-class DatabaseCreation(BaseDatabaseCreation):
-    # This dictionary maps Field objects to their associated PostgreSQL column
-    # types, as strings. Column-type strings can contain format strings; they'll
-    # be interpolated against the values of Field.__dict__ before being output.
-    # If a column type is set to None, it won't be included in the output.
-    data_types = {
-        'AutoField': 'serial',
-        'BinaryField': 'bytea',
-        'BooleanField': 'boolean',
-        'CharField': 'varchar(%(max_length)s)',
-        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
-        'DateField': 'date',
-        'DateTimeField': 'timestamp with time zone',
-        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
-        'FileField': 'varchar(%(max_length)s)',
-        'FilePathField': 'varchar(%(max_length)s)',
-        'FloatField': 'double precision',
-        'IntegerField': 'integer',
-        'BigIntegerField': 'bigint',
-        'IPAddressField': 'inet',
-        'GenericIPAddressField': 'inet',
-        'NullBooleanField': 'boolean',
-        'OneToOneField': 'integer',
-        'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)',
-        'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)',
-        'SlugField': 'varchar(%(max_length)s)',
-        'SmallIntegerField': 'smallint',
-        'TextField': 'text',
-        'TimeField': 'time',
-    }
-
-    def sql_table_creation_suffix(self):
-        assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time."
-        if self.connection.settings_dict['TEST_CHARSET']:
-            return "WITH ENCODING '%s'" % self.connection.settings_dict['TEST_CHARSET']
-        return ''
-
-    def sql_indexes_for_field(self, model, f, style):
-        output = []
-        if f.db_index or f.unique:
-            qn = self.connection.ops.quote_name
-            db_table = model._meta.db_table
-            tablespace = f.db_tablespace or model._meta.db_tablespace
-            if tablespace:
-                tablespace_sql = self.connection.ops.tablespace_sql(tablespace)
-                if tablespace_sql:
-                    tablespace_sql = ' ' + tablespace_sql
-            else:
-                tablespace_sql = ''
-
-            def get_index_sql(index_name, opclass=''):
-                return (style.SQL_KEYWORD('CREATE INDEX') + ' ' +
-                        style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' +
-                        style.SQL_KEYWORD('ON') + ' ' +
-                        style.SQL_TABLE(qn(db_table)) + ' ' +
-                        "(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) +
-                        "%s;" % tablespace_sql)
-
-            if not f.unique:
-                output = [get_index_sql('%s_%s' % (db_table, f.column))]
-
-            # Fields with database column types of `varchar` and `text` need
-            # a second index that specifies their operator class, which is
-            # needed when performing correct LIKE queries outside the
-            # C locale. See #12234.
-            db_type = f.db_type(connection=self.connection)
-            if db_type.startswith('varchar'):
-                output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
-                                            ' varchar_pattern_ops'))
-            elif db_type.startswith('text'):
-                output.append(get_index_sql('%s_%s_like' % (db_table, f.column),
-                                            ' text_pattern_ops'))
-        return output
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/introspection.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/introspection.py
deleted file mode 100644
index ea4e3e1..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/introspection.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from __future__ import unicode_literals
-
-from django.db.backends import BaseDatabaseIntrospection, FieldInfo
-from django.utils.encoding import force_text
-
-
-class DatabaseIntrospection(BaseDatabaseIntrospection):
-    # Maps type codes to Django Field types.
-    data_types_reverse = {
-        16: 'BooleanField',
-        17: 'BinaryField',
-        20: 'BigIntegerField',
-        21: 'SmallIntegerField',
-        23: 'IntegerField',
-        25: 'TextField',
-        700: 'FloatField',
-        701: 'FloatField',
-        869: 'GenericIPAddressField',
-        1042: 'CharField', # blank-padded
-        1043: 'CharField',
-        1082: 'DateField',
-        1083: 'TimeField',
-        1114: 'DateTimeField',
-        1184: 'DateTimeField',
-        1266: 'TimeField',
-        1700: 'DecimalField',
-    }
-
-    ignored_tables = []
-
-    def get_table_list(self, cursor):
-        "Returns a list of table names in the current database."
-        cursor.execute("""
-            SELECT c.relname
-            FROM pg_catalog.pg_class c
-            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
-            WHERE c.relkind IN ('r', 'v', '')
-                AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
-                AND pg_catalog.pg_table_is_visible(c.oid)""")
-        return [row[0] for row in cursor.fetchall() if row[0] not in self.ignored_tables]
-
-    def get_table_description(self, cursor, table_name):
-        "Returns a description of the table, with the DB-API cursor.description interface."
-        # As cursor.description does not return reliably the nullable property,
-        # we have to query the information_schema (#7783)
-        cursor.execute("""
-            SELECT column_name, is_nullable
-            FROM information_schema.columns
-            WHERE table_name = %s""", [table_name])
-        null_map = dict(cursor.fetchall())
-        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
-        return [FieldInfo(*((force_text(line[0]),) + line[1:6] + (null_map[force_text(line[0])]=='YES',)))
-                for line in cursor.description]
-
-    def get_relations(self, cursor, table_name):
-        """
-        Returns a dictionary of {field_index: (field_index_other_table, other_table)}
-        representing all relationships to the given table. Indexes are 0-based.
-        """
-        cursor.execute("""
-            SELECT con.conkey, con.confkey, c2.relname
-            FROM pg_constraint con, pg_class c1, pg_class c2
-            WHERE c1.oid = con.conrelid
-                AND c2.oid = con.confrelid
-                AND c1.relname = %s
-                AND con.contype = 'f'""", [table_name])
-        relations = {}
-        for row in cursor.fetchall():
-            # row[0] and row[1] are single-item lists, so grab the single item.
-            relations[row[0][0] - 1] = (row[1][0] - 1, row[2])
-        return relations
-
-    def get_key_columns(self, cursor, table_name):
-        key_columns = []
-        cursor.execute("""
-            SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column
-            FROM information_schema.constraint_column_usage ccu
-            LEFT JOIN information_schema.key_column_usage kcu
-                ON ccu.constraint_catalog = kcu.constraint_catalog
-                    AND ccu.constraint_schema = kcu.constraint_schema
-                    AND ccu.constraint_name = kcu.constraint_name
-            LEFT JOIN information_schema.table_constraints tc
-                ON ccu.constraint_catalog = tc.constraint_catalog
-                    AND ccu.constraint_schema = tc.constraint_schema
-                    AND ccu.constraint_name = tc.constraint_name
-            WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY'""" , [table_name])
-        key_columns.extend(cursor.fetchall())
-        return key_columns
-
-    def get_indexes(self, cursor, table_name):
-        # This query retrieves each index on the given table, including the
-        # first associated field name
-        cursor.execute("""
-            SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
-            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
-                pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
-            WHERE c.oid = idx.indrelid
-                AND idx.indexrelid = c2.oid
-                AND attr.attrelid = c.oid
-                AND attr.attnum = idx.indkey[0]
-                AND c.relname = %s""", [table_name])
-        indexes = {}
-        for row in cursor.fetchall():
-            # row[1] (idx.indkey) is stored in the DB as an array. It comes out as
-            # a string of space-separated integers. This designates the field
-            # indexes (1-based) of the fields that have indexes on the table.
-            # Here, we skip any indexes across multiple fields.
-            if ' ' in row[1]:
-                continue
-            indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]}
-        return indexes
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/operations.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/operations.py
deleted file mode 100644
index c5aab84..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/operations.py
+++ /dev/null
@@ -1,222 +0,0 @@
-from __future__ import unicode_literals
-
-from django.conf import settings
-from django.db.backends import BaseDatabaseOperations
-
-
-class DatabaseOperations(BaseDatabaseOperations):
-    def __init__(self, connection):
-        super(DatabaseOperations, self).__init__(connection)
-
-    def date_extract_sql(self, lookup_type, field_name):
-        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
-        if lookup_type == 'week_day':
-            # For consistency across backends, we return Sunday=1, Saturday=7.
-            return "EXTRACT('dow' FROM %s) + 1" % field_name
-        else:
-            return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
-
-    def date_interval_sql(self, sql, connector, timedelta):
-        """
-        implements the interval functionality for expressions
-        format for Postgres:
-        (datefield + interval '3 days 200 seconds 5 microseconds')
-        """
-        modifiers = []
-        if timedelta.days:
-            modifiers.append('%s days' % timedelta.days)
-        if timedelta.seconds:
-            modifiers.append('%s seconds' % timedelta.seconds)
-        if timedelta.microseconds:
-            modifiers.append('%s microseconds' % timedelta.microseconds)
-        mods = ' '.join(modifiers)
-        conn = ' %s ' % connector
-        return '(%s)' % conn.join([sql, 'interval \'%s\'' % mods])
-
-    def date_trunc_sql(self, lookup_type, field_name):
-        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
-        return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
-
-    def datetime_extract_sql(self, lookup_type, field_name, tzname):
-        if settings.USE_TZ:
-            field_name = "%s AT TIME ZONE %%s" % field_name
-            params = [tzname]
-        else:
-            params = []
-        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
-        if lookup_type == 'week_day':
-            # For consistency across backends, we return Sunday=1, Saturday=7.
-            sql = "EXTRACT('dow' FROM %s) + 1" % field_name
-        else:
-            sql = "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
-        return sql, params
-
-    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
-        if settings.USE_TZ:
-            field_name = "%s AT TIME ZONE %%s" % field_name
-            params = [tzname]
-        else:
-            params = []
-        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
-        sql = "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
-        return sql, params
-
-    def deferrable_sql(self):
-        return " DEFERRABLE INITIALLY DEFERRED"
-
-    def lookup_cast(self, lookup_type):
-        lookup = '%s'
-
-        # Cast text lookups to text to allow things like filter(x__contains=4)
-        if lookup_type in ('iexact', 'contains', 'icontains', 'startswith',
-                           'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
-            lookup = "%s::text"
-
-        # Use UPPER(x) for case-insensitive lookups; it's faster.
-        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
-            lookup = 'UPPER(%s)' % lookup
-
-        return lookup
-
-    def field_cast_sql(self, db_type, internal_type):
-        if internal_type == "GenericIPAddressField" or internal_type == "IPAddressField":
-            return 'HOST(%s)'
-        return '%s'
-
-    def last_insert_id(self, cursor, table_name, pk_name):
-        # Use pg_get_serial_sequence to get the underlying sequence name
-        # from the table name and column name (available since PostgreSQL 8)
-        cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (
-            self.quote_name(table_name), pk_name))
-        return cursor.fetchone()[0]
-
-    def no_limit_value(self):
-        return None
-
-    def quote_name(self, name):
-        if name.startswith('"') and name.endswith('"'):
-            return name # Quoting once is enough.
-        return '"%s"' % name
-
-    def set_time_zone_sql(self):
-        return "SET TIME ZONE %s"
-
-    def sql_flush(self, style, tables, sequences, allow_cascade=False):
-        if tables:
-            # Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows
-            # us to truncate tables referenced by a foreign key in any other
-            # table.
-            tables_sql = ', '.join(
-                style.SQL_FIELD(self.quote_name(table)) for table in tables)
-            if allow_cascade:
-                sql = ['%s %s %s;' % (
-                    style.SQL_KEYWORD('TRUNCATE'),
-                    tables_sql,
-                    style.SQL_KEYWORD('CASCADE'),
-                )]
-            else:
-                sql = ['%s %s;' % (
-                    style.SQL_KEYWORD('TRUNCATE'),
-                    tables_sql,
-                )]
-            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
-            return sql
-        else:
-            return []
-
-    def sequence_reset_by_name_sql(self, style, sequences):
-        # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
-        # to reset sequence indices
-        sql = []
-        for sequence_info in sequences:
-            table_name = sequence_info['table']
-            column_name = sequence_info['column']
-            if not (column_name and len(column_name) > 0):
-                # This will be the case if it's an m2m using an autogenerated
-                # intermediate table (see BaseDatabaseIntrospection.sequence_list)
-                column_name = 'id'
-            sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \
-                (style.SQL_KEYWORD('SELECT'),
-                 style.SQL_TABLE(self.quote_name(table_name)),
-                 style.SQL_FIELD(column_name))
-            )
-        return sql
-
-    def tablespace_sql(self, tablespace, inline=False):
-        if inline:
-            return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
-        else:
-            return "TABLESPACE %s" % self.quote_name(tablespace)
-
-    def sequence_reset_sql(self, style, model_list):
-        from django.db import models
-        output = []
-        qn = self.quote_name
-        for model in model_list:
-            # Use `coalesce` to set the sequence for each model to the max pk value if there are records,
-            # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
-            # if there are records (as the max pk value is already in use), otherwise set it to false.
-            # Use pg_get_serial_sequence to get the underlying sequence name from the table name
-            # and column name (available since PostgreSQL 8)
-
-            for f in model._meta.local_fields:
-                if isinstance(f, models.AutoField):
-                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
-                        (style.SQL_KEYWORD('SELECT'),
-                         style.SQL_TABLE(qn(model._meta.db_table)),
-                         style.SQL_FIELD(f.column),
-                         style.SQL_FIELD(qn(f.column)),
-                         style.SQL_FIELD(qn(f.column)),
-                         style.SQL_KEYWORD('IS NOT'),
-                         style.SQL_KEYWORD('FROM'),
-                         style.SQL_TABLE(qn(model._meta.db_table))))
-                    break # Only one AutoField is allowed per model, so don't bother continuing.
-            for f in model._meta.many_to_many:
-                if not f.rel.through:
-                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
-                        (style.SQL_KEYWORD('SELECT'),
-                         style.SQL_TABLE(qn(f.m2m_db_table())),
-                         style.SQL_FIELD('id'),
-                         style.SQL_FIELD(qn('id')),
-                         style.SQL_FIELD(qn('id')),
-                         style.SQL_KEYWORD('IS NOT'),
-                         style.SQL_KEYWORD('FROM'),
-                         style.SQL_TABLE(qn(f.m2m_db_table()))))
-        return output
-
-    def prep_for_iexact_query(self, x):
-        return x
-
-    def max_name_length(self):
-        """
-        Returns the maximum length of an identifier.
-
-        Note that the maximum length of an identifier is 63 by default, but can
-        be changed by recompiling PostgreSQL after editing the NAMEDATALEN
-        macro in src/include/pg_config_manual.h .
-
-        This implementation simply returns 63, but can easily be overridden by a
-        custom database backend that inherits most of its behavior from this one.
-        """
-
-        return 63
-
-    def distinct_sql(self, fields):
-        if fields:
-            return 'DISTINCT ON (%s)' % ', '.join(fields)
-        else:
-            return 'DISTINCT'
-
-    def last_executed_query(self, cursor, sql, params):
-        # http://initd.org/psycopg/docs/cursor.html#cursor.query
-        # The query attribute is a Psycopg extension to the DB API 2.0.
-        if cursor.query is not None:
-            return cursor.query.decode('utf-8')
-        return None
-
-    def return_insert_id(self):
-        return "RETURNING %s", ()
-
-    def bulk_insert_sql(self, fields, num_values):
-        items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
-        return "VALUES " + ", ".join([items_sql] * num_values)
diff --git a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/version.py b/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/version.py
deleted file mode 100644
index 8ef5167..0000000
--- a/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/version.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
-Extracts the version of the PostgreSQL server.
-"""
-
-import re
-
-# This reg-exp is intentionally fairly flexible here.
-# Needs to be able to handle stuff like:
-#   PostgreSQL 8.3.6
-#   EnterpriseDB 8.3
-#   PostgreSQL 8.3 beta4
-#   PostgreSQL 8.4beta1
-VERSION_RE = re.compile(r'\S+ (\d+)\.(\d+)\.?(\d+)?')
-
-
-def _parse_version(text):
-    "Internal parsing method. Factored out for testing purposes."
-    major, major2, minor = VERSION_RE.search(text).groups()
-    try:
-        return int(major) * 10000 + int(major2) * 100 + int(minor)
-    except (ValueError, TypeError):
-        return int(major) * 10000 + int(major2) * 100
-
-def get_version(connection):
-    """
-    Returns an integer representing the major, minor and revision number of the
-    server. Format is the one used for the return value of libpq
-    PQServerVersion()/``server_version`` connection attribute (available in
-    newer psycopg2 versions.)
-
-    For example, 80304 for 8.3.4. The last two digits will be 00 in the case of
-    releases (e.g., 80400 for 'PostgreSQL 8.4') or in the case of beta and
-    prereleases (e.g. 90100 for 'PostgreSQL 9.1beta2').
-
-    PQServerVersion()/``server_version`` doesn't execute a query so try that
-    first, then fallback to a ``SELECT version()`` query.
-    """
-    if hasattr(connection, 'server_version'):
-        return connection.server_version
-    else:
-        cursor = connection.cursor()
-        cursor.execute("SELECT version()")
-        return _parse_version(cursor.fetchone()[0])