├── .gitignore ├── LICENSE ├── README.rst ├── django-redshift ├── __init__.py ├── base.py ├── client.py ├── creation.py ├── introspection.py ├── operations.py └── version.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *pyc 2 | *swp 3 | .DS_Store 4 | dist 5 | *.egg-info 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013 Matt George 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | django-redshift 2 | ================== 3 | 4 | Amazon Redshift backend for Django. 5 | 6 | Requirements 7 | ------------- 8 | * psycopg2 >= 2.5 9 | * django 1.5 10 | 11 | 12 | Usage 13 | ----- 14 | 15 | 16 | -------------------------------------------------------------------------------- /django-redshift/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/binarymatt/django-redshift/8302039d643ba98c2080669d29802eddd4959117/django-redshift/__init__.py -------------------------------------------------------------------------------- /django-redshift/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | PostgreSQL database backend for Django. 
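Adapted for Amazon Redshift: the backend reuses Django's postgresql_psycopg2 machinery and pins connections to psycopg2's SERIALIZABLE isolation level. A minimal settings sketch follows; the ENGINE path assumes the package is importable under the name declared in setup.py, and every other value is a placeholder for your own cluster:

    DATABASES = {
        'default': {
            'ENGINE': 'django-redshift',       # assumed import path, adjust to your install
            'NAME': 'analytics',               # placeholder database name
            'USER': 'redshift_user',           # placeholder credentials
            'PASSWORD': '...',
            'HOST': 'mycluster.example.us-east-1.redshift.amazonaws.com',  # placeholder endpoint
            'PORT': '5439',                    # Redshift's default port
        },
    }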
3 | 4 | Requires psycopg 2: http://initd.org/projects/psycopg2 5 | """ 6 | import logging 7 | import sys 8 | 9 | from django.db import utils 10 | from django.db.backends import * 11 | from django.db.backends.signals import connection_created 12 | from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations 13 | from django.db.backends.postgresql_psycopg2.client import DatabaseClient 14 | from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation 15 | from django.db.backends.postgresql_psycopg2.version import get_version 16 | from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection 17 | from django.utils.encoding import force_str 18 | from django.utils.safestring import SafeText, SafeBytes 19 | from django.utils import six 20 | from django.utils.timezone import utc 21 | 22 | try: 23 | import psycopg2 as Database 24 | import psycopg2.extensions 25 | except ImportError as e: 26 | from django.core.exceptions import ImproperlyConfigured 27 | raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e) 28 | 29 | DatabaseError = Database.DatabaseError 30 | IntegrityError = Database.IntegrityError 31 | 32 | psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) 33 | psycopg2.extensions.register_adapter(SafeBytes, psycopg2.extensions.QuotedString) 34 | psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString) 35 | 36 | logger = logging.getLogger('django.db.backends') 37 | 38 | def utc_tzinfo_factory(offset): 39 | if offset != 0: 40 | raise AssertionError("database connection isn't set to UTC") 41 | return utc 42 | 43 | class CursorWrapper(object): 44 | """ 45 | A thin wrapper around psycopg2's normal cursor class so that we can catch 46 | particular exception instances and reraise them with the right types. 
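For example, a psycopg2 IntegrityError raised inside execute() or executemany() reaches the caller as django.db.utils.IntegrityError, so calling code only has to handle Django's own exception classes.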
47 | """ 48 | 49 | def __init__(self, cursor): 50 | self.cursor = cursor 51 | 52 | def execute(self, query, args=None): 53 | try: 54 | return self.cursor.execute(query, args) 55 | except Database.IntegrityError as e: 56 | six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) 57 | except Database.DatabaseError as e: 58 | six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(e.args)), sys.exc_info()[2]) 59 | 60 | def executemany(self, query, args): 61 | try: 62 | return self.cursor.executemany(query, args) 63 | except Database.IntegrityError as e: 64 | six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) 65 | except Database.DatabaseError as e: 66 | six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(e.args)), sys.exc_info()[2]) 67 | 68 | def __getattr__(self, attr): 69 | if attr in self.__dict__: 70 | return self.__dict__[attr] 71 | else: 72 | return getattr(self.cursor, attr) 73 | 74 | def __iter__(self): 75 | return iter(self.cursor) 76 | 77 | class DatabaseFeatures(BaseDatabaseFeatures): 78 | needs_datetime_string_cast = False 79 | can_return_id_from_insert = True 80 | requires_rollback_on_dirty_transaction = True 81 | has_real_datatype = True 82 | can_defer_constraint_checks = True 83 | has_select_for_update = True 84 | has_select_for_update_nowait = True 85 | has_bulk_insert = True 86 | supports_tablespaces = True 87 | supports_transactions = True 88 | can_distinct_on_fields = True 89 | 90 | class DatabaseWrapper(BaseDatabaseWrapper): 91 | vendor = 'postgresql' 92 | operators = { 93 | 'exact': '= %s', 94 | 'iexact': '= UPPER(%s)', 95 | 'contains': 'LIKE %s', 96 | 'icontains': 'LIKE UPPER(%s)', 97 | 'regex': '~ %s', 98 | 'iregex': '~* %s', 99 | 'gt': '> %s', 100 | 'gte': '>= %s', 101 | 'lt': '< %s', 102 | 'lte': '<= %s', 103 | 'startswith': 'LIKE %s', 104 | 'endswith': 'LIKE %s', 105 | 'istartswith': 'LIKE UPPER(%s)', 106 | 'iendswith': 'LIKE UPPER(%s)', 107 | } 108 | 109 | def __init__(self, *args, **kwargs): 110 | super(DatabaseWrapper, self).__init__(*args, **kwargs) 111 | 112 | self.features = DatabaseFeatures(self) 113 | autocommit = self.settings_dict["OPTIONS"].get('autocommit', False) 114 | self.features.uses_autocommit = autocommit 115 | #if autocommit: 116 | # level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT 117 | #else: 118 | level = psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE 119 | self._set_isolation_level(level) 120 | self.ops = DatabaseOperations(self) 121 | self.client = DatabaseClient(self) 122 | self.creation = DatabaseCreation(self) 123 | self.introspection = DatabaseIntrospection(self) 124 | self.validation = BaseDatabaseValidation(self) 125 | self._pg_version = None 126 | 127 | def check_constraints(self, table_names=None): 128 | """ 129 | To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they 130 | are returned to deferred. 131 | """ 132 | self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE') 133 | self.cursor().execute('SET CONSTRAINTS ALL DEFERRED') 134 | 135 | def close(self): 136 | self.validate_thread_sharing() 137 | if self.connection is None: 138 | return 139 | 140 | try: 141 | self.connection.close() 142 | self.connection = None 143 | except Database.Error: 144 | # In some cases (database restart, network connection lost etc...) 145 | # the connection to the database is lost without giving Django a 146 | # notification. If we don't set self.connection to None, the error 147 | # will occur a every request. 
148 | self.connection = None 149 | logger.warning('psycopg2 error while closing the connection.', 150 | exc_info=sys.exc_info() 151 | ) 152 | raise 153 | 154 | def _get_pg_version(self): 155 | if self._pg_version is None: 156 | self._pg_version = get_version(self.connection) 157 | return self._pg_version 158 | pg_version = property(_get_pg_version) 159 | 160 | def _cursor(self): 161 | settings_dict = self.settings_dict 162 | if self.connection is None: 163 | if not settings_dict['NAME']: 164 | from django.core.exceptions import ImproperlyConfigured 165 | raise ImproperlyConfigured( 166 | "settings.DATABASES is improperly configured. " 167 | "Please supply the NAME value.") 168 | conn_params = { 169 | 'database': settings_dict['NAME'], 170 | } 171 | conn_params.update(settings_dict['OPTIONS']) 172 | if 'autocommit' in conn_params: 173 | del conn_params['autocommit'] 174 | if settings_dict['USER']: 175 | conn_params['user'] = settings_dict['USER'] 176 | if settings_dict['PASSWORD']: 177 | conn_params['password'] = force_str(settings_dict['PASSWORD']) 178 | if settings_dict['HOST']: 179 | conn_params['host'] = settings_dict['HOST'] 180 | if settings_dict['PORT']: 181 | conn_params['port'] = settings_dict['PORT'] 182 | self.connection = Database.connect(**conn_params) 183 | self.connection.set_client_encoding('UTF8') 184 | tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE') 185 | if tz: 186 | try: 187 | get_parameter_status = self.connection.get_parameter_status 188 | except AttributeError: 189 | # psycopg2 < 2.0.12 doesn't have get_parameter_status 190 | conn_tz = None 191 | else: 192 | conn_tz = get_parameter_status('TimeZone') 193 | 194 | if conn_tz != tz: 195 | # Set the time zone in autocommit mode (see #17062) 196 | self.connection.set_isolation_level( 197 | psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 198 | self.connection.cursor().execute( 199 | self.ops.set_time_zone_sql(), [tz]) 200 | self.connection.set_isolation_level(self.isolation_level) 201 | self._get_pg_version() 202 | connection_created.send(sender=self.__class__, connection=self) 203 | cursor = self.connection.cursor() 204 | cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None 205 | return CursorWrapper(cursor) 206 | 207 | def _enter_transaction_management(self, managed): 208 | """ 209 | Switch the isolation level when needing transaction support, so that 210 | the same transaction is visible across all the queries. 211 | """ 212 | if self.features.uses_autocommit and managed and not self.isolation_level: 213 | self._set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 214 | 215 | def _leave_transaction_management(self, managed): 216 | """ 217 | If the normal operating mode is "autocommit", switch back to that when 218 | leaving transaction management. 219 | """ 220 | if self.features.uses_autocommit and not managed and self.isolation_level: 221 | self._set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 222 | 223 | def _set_isolation_level(self, level): 224 | """ 225 | Do all the related feature configurations for changing isolation 226 | levels. This doesn't touch the uses_autocommit feature, since that 227 | controls the movement *between* isolation levels. 
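The level argument is one of the integer isolation-level constants from psycopg2.extensions (the wrapper above passes ISOLATION_LEVEL_SERIALIZABLE); the assertion below restricts it to the range 0 through 4.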
228 | """ 229 | assert level in range(5) 230 | try: 231 | if self.connection is not None: 232 | self.connection.set_isolation_level(level) 233 | finally: 234 | self.isolation_level = level 235 | self.features.uses_savepoints = bool(level) 236 | 237 | def _commit(self): 238 | if self.connection is not None: 239 | try: 240 | return self.connection.commit() 241 | except Database.IntegrityError as e: 242 | six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) 243 | -------------------------------------------------------------------------------- /django-redshift/client.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from django.db.backends import BaseDatabaseClient 5 | 6 | class DatabaseClient(BaseDatabaseClient): 7 | executable_name = 'psql' 8 | 9 | def runshell(self): 10 | settings_dict = self.connection.settings_dict 11 | args = [self.executable_name] 12 | if settings_dict['USER']: 13 | args += ["-U", settings_dict['USER']] 14 | if settings_dict['HOST']: 15 | args.extend(["-h", settings_dict['HOST']]) 16 | if settings_dict['PORT']: 17 | args.extend(["-p", str(settings_dict['PORT'])]) 18 | args += [settings_dict['NAME']] 19 | if os.name == 'nt': 20 | sys.exit(os.system(" ".join(args))) 21 | else: 22 | os.execvp(self.executable_name, args) 23 | 24 | -------------------------------------------------------------------------------- /django-redshift/creation.py: -------------------------------------------------------------------------------- 1 | import psycopg2.extensions 2 | 3 | from django.db.backends.creation import BaseDatabaseCreation 4 | from django.db.backends.util import truncate_name 5 | 6 | 7 | class DatabaseCreation(BaseDatabaseCreation): 8 | # This dictionary maps Field objects to their associated PostgreSQL column 9 | # types, as strings. Column-type strings can contain format strings; they'll 10 | # be interpolated against the values of Field.__dict__ before being output. 11 | # If a column type is set to None, it won't be included in the output. 12 | data_types = { 13 | 'AutoField': 'serial', 14 | 'BooleanField': 'boolean', 15 | 'CharField': 'varchar(%(max_length)s)', 16 | 'CommaSeparatedIntegerField': 'varchar(%(max_length)s)', 17 | 'DateField': 'date', 18 | 'DateTimeField': 'timestamp with time zone', 19 | 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)', 20 | 'FileField': 'varchar(%(max_length)s)', 21 | 'FilePathField': 'varchar(%(max_length)s)', 22 | 'FloatField': 'double precision', 23 | 'IntegerField': 'integer', 24 | 'BigIntegerField': 'bigint', 25 | 'IPAddressField': 'inet', 26 | 'GenericIPAddressField': 'inet', 27 | 'NullBooleanField': 'boolean', 28 | 'OneToOneField': 'integer', 29 | 'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)', 30 | 'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)', 31 | 'SlugField': 'varchar(%(max_length)s)', 32 | 'SmallIntegerField': 'smallint', 33 | 'TextField': 'text', 34 | 'TimeField': 'time', 35 | } 36 | 37 | def sql_table_creation_suffix(self): 38 | assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time." 
39 | if self.connection.settings_dict['TEST_CHARSET']: 40 | return "WITH ENCODING '%s'" % self.connection.settings_dict['TEST_CHARSET'] 41 | return '' 42 | 43 | def sql_indexes_for_field(self, model, f, style): 44 | output = [] 45 | if f.db_index or f.unique: 46 | qn = self.connection.ops.quote_name 47 | db_table = model._meta.db_table 48 | tablespace = f.db_tablespace or model._meta.db_tablespace 49 | if tablespace: 50 | tablespace_sql = self.connection.ops.tablespace_sql(tablespace) 51 | if tablespace_sql: 52 | tablespace_sql = ' ' + tablespace_sql 53 | else: 54 | tablespace_sql = '' 55 | 56 | def get_index_sql(index_name, opclass=''): 57 | return (style.SQL_KEYWORD('CREATE INDEX') + ' ' + 58 | style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' + 59 | style.SQL_KEYWORD('ON') + ' ' + 60 | style.SQL_TABLE(qn(db_table)) + ' ' + 61 | "(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) + 62 | "%s;" % tablespace_sql) 63 | 64 | if not f.unique: 65 | output = [get_index_sql('%s_%s' % (db_table, f.column))] 66 | 67 | # Fields with database column types of `varchar` and `text` need 68 | # a second index that specifies their operator class, which is 69 | # needed when performing correct LIKE queries outside the 70 | # C locale. See #12234. 71 | db_type = f.db_type(connection=self.connection) 72 | if db_type.startswith('varchar'): 73 | output.append(get_index_sql('%s_%s_like' % (db_table, f.column), 74 | ' varchar_pattern_ops')) 75 | elif db_type.startswith('text'): 76 | output.append(get_index_sql('%s_%s_like' % (db_table, f.column), 77 | ' text_pattern_ops')) 78 | return output 79 | 80 | def set_autocommit(self): 81 | self._prepare_for_test_db_ddl() 82 | 83 | def _prepare_for_test_db_ddl(self): 84 | """Rollback and close the active transaction.""" 85 | self.connection.connection.rollback() 86 | self.connection.connection.set_isolation_level( 87 | psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) 88 | -------------------------------------------------------------------------------- /django-redshift/introspection.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | from django.db.backends import BaseDatabaseIntrospection 4 | 5 | 6 | class DatabaseIntrospection(BaseDatabaseIntrospection): 7 | # Maps type codes to Django Field types. 8 | data_types_reverse = { 9 | 16: 'BooleanField', 10 | 20: 'BigIntegerField', 11 | 21: 'SmallIntegerField', 12 | 23: 'IntegerField', 13 | 25: 'TextField', 14 | 700: 'FloatField', 15 | 701: 'FloatField', 16 | 869: 'GenericIPAddressField', 17 | 1042: 'CharField', # blank-padded 18 | 1043: 'CharField', 19 | 1082: 'DateField', 20 | 1083: 'TimeField', 21 | 1114: 'DateTimeField', 22 | 1184: 'DateTimeField', 23 | 1266: 'TimeField', 24 | 1700: 'DecimalField', 25 | } 26 | 27 | def get_table_list(self, cursor): 28 | "Returns a list of table names in the current database." 29 | cursor.execute(""" 30 | SELECT c.relname 31 | FROM pg_catalog.pg_class c 32 | LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace 33 | WHERE c.relkind IN ('r', 'v', '') 34 | AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 35 | AND pg_catalog.pg_table_is_visible(c.oid)""") 36 | return [row[0] for row in cursor.fetchall()] 37 | 38 | def get_table_description(self, cursor, table_name): 39 | "Returns a description of the table, with the DB-API cursor.description interface." 
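# Each cursor.description entry is the DB-API 7-tuple (name, type_code, display_size, internal_size, precision, scale, null_ok); the code below keeps the first six values and rebuilds null_ok from information_schema.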
40 | # As cursor.description does not return reliably the nullable property, 41 | # we have to query the information_schema (#7783) 42 | cursor.execute(""" 43 | SELECT column_name, is_nullable 44 | FROM information_schema.columns 45 | WHERE table_name = %s""", [table_name]) 46 | null_map = dict(cursor.fetchall()) 47 | cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) 48 | return [line[:6] + (null_map[line[0]]=='YES',) 49 | for line in cursor.description] 50 | 51 | def get_relations(self, cursor, table_name): 52 | """ 53 | Returns a dictionary of {field_index: (field_index_other_table, other_table)} 54 | representing all relationships to the given table. Indexes are 0-based. 55 | """ 56 | cursor.execute(""" 57 | SELECT con.conkey, con.confkey, c2.relname 58 | FROM pg_constraint con, pg_class c1, pg_class c2 59 | WHERE c1.oid = con.conrelid 60 | AND c2.oid = con.confrelid 61 | AND c1.relname = %s 62 | AND con.contype = 'f'""", [table_name]) 63 | relations = {} 64 | for row in cursor.fetchall(): 65 | # row[0] and row[1] are single-item lists, so grab the single item. 66 | relations[row[0][0] - 1] = (row[1][0] - 1, row[2]) 67 | return relations 68 | 69 | def get_indexes(self, cursor, table_name): 70 | # This query retrieves each index on the given table, including the 71 | # first associated field name 72 | cursor.execute(""" 73 | SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary 74 | FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, 75 | pg_catalog.pg_index idx, pg_catalog.pg_attribute attr 76 | WHERE c.oid = idx.indrelid 77 | AND idx.indexrelid = c2.oid 78 | AND attr.attrelid = c.oid 79 | AND attr.attnum = idx.indkey[0] 80 | AND c.relname = %s""", [table_name]) 81 | indexes = {} 82 | for row in cursor.fetchall(): 83 | # row[1] (idx.indkey) is stored in the DB as an array. It comes out as 84 | # a string of space-separated integers. This designates the field 85 | # indexes (1-based) of the fields that have indexes on the table. 86 | # Here, we skip any indexes across multiple fields. 87 | if ' ' in row[1]: 88 | continue 89 | indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} 90 | return indexes 91 | -------------------------------------------------------------------------------- /django-redshift/operations.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | from django.db.backends import BaseDatabaseOperations 4 | 5 | 6 | class DatabaseOperations(BaseDatabaseOperations): 7 | def __init__(self, connection): 8 | super(DatabaseOperations, self).__init__(connection) 9 | 10 | def date_extract_sql(self, lookup_type, field_name): 11 | # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT 12 | if lookup_type == 'week_day': 13 | # For consistency across backends, we return Sunday=1, Saturday=7. 
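# PostgreSQL (and Redshift) EXTRACT('dow' ...) returns 0-6 with Sunday = 0, hence the "+ 1" below.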
14 | return "EXTRACT('dow' FROM %s) + 1" % field_name 15 | else: 16 | return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name) 17 | 18 | def date_interval_sql(self, sql, connector, timedelta): 19 | """ 20 | implements the interval functionality for expressions 21 | format for Postgres: 22 | (datefield + interval '3 days 200 seconds 5 microseconds') 23 | """ 24 | modifiers = [] 25 | if timedelta.days: 26 | modifiers.append('%s days' % timedelta.days) 27 | if timedelta.seconds: 28 | modifiers.append('%s seconds' % timedelta.seconds) 29 | if timedelta.microseconds: 30 | modifiers.append('%s microseconds' % timedelta.microseconds) 31 | mods = ' '.join(modifiers) 32 | conn = ' %s ' % connector 33 | return '(%s)' % conn.join([sql, 'interval \'%s\'' % mods]) 34 | 35 | def date_trunc_sql(self, lookup_type, field_name): 36 | # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC 37 | return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) 38 | 39 | def deferrable_sql(self): 40 | return " DEFERRABLE INITIALLY DEFERRED" 41 | 42 | def lookup_cast(self, lookup_type): 43 | lookup = '%s' 44 | 45 | # Cast text lookups to text to allow things like filter(x__contains=4) 46 | if lookup_type in ('iexact', 'contains', 'icontains', 'startswith', 47 | 'istartswith', 'endswith', 'iendswith'): 48 | lookup = "%s::text" 49 | 50 | # Use UPPER(x) for case-insensitive lookups; it's faster. 51 | if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'): 52 | lookup = 'UPPER(%s)' % lookup 53 | 54 | return lookup 55 | 56 | def field_cast_sql(self, db_type): 57 | if db_type == 'inet': 58 | return 'HOST(%s)' 59 | return '%s' 60 | 61 | def last_insert_id(self, cursor, table_name, pk_name): 62 | # Use pg_get_serial_sequence to get the underlying sequence name 63 | # from the table name and column name (available since PostgreSQL 8) 64 | cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % ( 65 | self.quote_name(table_name), pk_name)) 66 | return cursor.fetchone()[0] 67 | 68 | def no_limit_value(self): 69 | return None 70 | 71 | def quote_name(self, name): 72 | if name.startswith('"') and name.endswith('"'): 73 | return name # Quoting once is enough. 74 | return '"%s"' % name 75 | 76 | def set_time_zone_sql(self): 77 | return "SET TIME ZONE %s" 78 | 79 | def sql_flush(self, style, tables, sequences): 80 | if tables: 81 | # Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows 82 | # us to truncate tables referenced by a foreign key in any other 83 | # table. 84 | sql = ['%s %s;' % \ 85 | (style.SQL_KEYWORD('TRUNCATE'), 86 | style.SQL_FIELD(', '.join([self.quote_name(table) for table in tables])) 87 | )] 88 | sql.extend(self.sequence_reset_by_name_sql(style, sequences)) 89 | return sql 90 | else: 91 | return [] 92 | 93 | def sequence_reset_by_name_sql(self, style, sequences): 94 | # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... 
style SQL statements 95 | # to reset sequence indices 96 | sql = [] 97 | for sequence_info in sequences: 98 | table_name = sequence_info['table'] 99 | column_name = sequence_info['column'] 100 | if not (column_name and len(column_name) > 0): 101 | # This will be the case if it's an m2m using an autogenerated 102 | # intermediate table (see BaseDatabaseIntrospection.sequence_list) 103 | column_name = 'id' 104 | sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \ 105 | (style.SQL_KEYWORD('SELECT'), 106 | style.SQL_TABLE(self.quote_name(table_name)), 107 | style.SQL_FIELD(column_name)) 108 | ) 109 | return sql 110 | 111 | def tablespace_sql(self, tablespace, inline=False): 112 | if inline: 113 | return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) 114 | else: 115 | return "TABLESPACE %s" % self.quote_name(tablespace) 116 | 117 | def sequence_reset_sql(self, style, model_list): 118 | from django.db import models 119 | output = [] 120 | qn = self.quote_name 121 | for model in model_list: 122 | # Use `coalesce` to set the sequence for each model to the max pk value if there are records, 123 | # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true 124 | # if there are records (as the max pk value is already in use), otherwise set it to false. 125 | # Use pg_get_serial_sequence to get the underlying sequence name from the table name 126 | # and column name (available since PostgreSQL 8) 127 | 128 | for f in model._meta.local_fields: 129 | if isinstance(f, models.AutoField): 130 | output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \ 131 | (style.SQL_KEYWORD('SELECT'), 132 | style.SQL_TABLE(qn(model._meta.db_table)), 133 | style.SQL_FIELD(f.column), 134 | style.SQL_FIELD(qn(f.column)), 135 | style.SQL_FIELD(qn(f.column)), 136 | style.SQL_KEYWORD('IS NOT'), 137 | style.SQL_KEYWORD('FROM'), 138 | style.SQL_TABLE(qn(model._meta.db_table)))) 139 | break # Only one AutoField is allowed per model, so don't bother continuing. 140 | for f in model._meta.many_to_many: 141 | if not f.rel.through: 142 | output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \ 143 | (style.SQL_KEYWORD('SELECT'), 144 | style.SQL_TABLE(qn(f.m2m_db_table())), 145 | style.SQL_FIELD('id'), 146 | style.SQL_FIELD(qn('id')), 147 | style.SQL_FIELD(qn('id')), 148 | style.SQL_KEYWORD('IS NOT'), 149 | style.SQL_KEYWORD('FROM'), 150 | style.SQL_TABLE(qn(f.m2m_db_table())))) 151 | return output 152 | 153 | def savepoint_create_sql(self, sid): 154 | return "SAVEPOINT %s" % sid 155 | 156 | def savepoint_commit_sql(self, sid): 157 | return "RELEASE SAVEPOINT %s" % sid 158 | 159 | def savepoint_rollback_sql(self, sid): 160 | return "ROLLBACK TO SAVEPOINT %s" % sid 161 | 162 | def prep_for_iexact_query(self, x): 163 | return x 164 | 165 | def check_aggregate_support(self, aggregate): 166 | """Check that the backend fully supports the provided aggregate. 167 | 168 | The implementation of population statistics (STDDEV_POP and VAR_POP) 169 | under Postgres 8.2 - 8.2.4 is known to be faulty. Raise 170 | NotImplementedError if this is the database in use. 171 | """ 172 | if aggregate.sql_function in ('STDDEV_POP', 'VAR_POP'): 173 | pg_version = self.connection.pg_version 174 | if pg_version >= 80200 and pg_version <= 80204: 175 | raise NotImplementedError('PostgreSQL 8.2 to 8.2.4 is known to have a faulty implementation of %s. 
Please upgrade your version of PostgreSQL.' % aggregate.sql_function) 176 | 177 | def max_name_length(self): 178 | """ 179 | Returns the maximum length of an identifier. 180 | 181 | Note that the maximum length of an identifier is 63 by default, but can 182 | be changed by recompiling PostgreSQL after editing the NAMEDATALEN 183 | macro in src/include/pg_config_manual.h . 184 | 185 | This implementation simply returns 63, but can easily be overridden by a 186 | custom database backend that inherits most of its behavior from this one. 187 | """ 188 | 189 | return 63 190 | 191 | def distinct_sql(self, fields): 192 | if fields: 193 | return 'DISTINCT ON (%s)' % ', '.join(fields) 194 | else: 195 | return 'DISTINCT' 196 | 197 | def last_executed_query(self, cursor, sql, params): 198 | # http://initd.org/psycopg/docs/cursor.html#cursor.query 199 | # The query attribute is a Psycopg extension to the DB API 2.0. 200 | if cursor.query is not None: 201 | return cursor.query.decode('utf-8') 202 | return None 203 | 204 | def return_insert_id(self): 205 | return "RETURNING %s", () 206 | 207 | def bulk_insert_sql(self, fields, num_values): 208 | items_sql = "(%s)" % ", ".join(["%s"] * len(fields)) 209 | return "VALUES " + ", ".join([items_sql] * num_values) 210 | -------------------------------------------------------------------------------- /django-redshift/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Extracts the version of the PostgreSQL server. 3 | """ 4 | 5 | import re 6 | 7 | # This reg-exp is intentionally fairly flexible here. 8 | # Needs to be able to handle stuff like: 9 | # PostgreSQL 8.3.6 10 | # EnterpriseDB 8.3 11 | # PostgreSQL 8.3 beta4 12 | # PostgreSQL 8.4beta1 13 | VERSION_RE = re.compile(r'\S+ (\d+)\.(\d+)\.?(\d+)?') 14 | 15 | 16 | def _parse_version(text): 17 | "Internal parsing method. Factored out for testing purposes." 18 | major, major2, minor = VERSION_RE.search(text).groups() 19 | try: 20 | return int(major) * 10000 + int(major2) * 100 + int(minor) 21 | except (ValueError, TypeError): 22 | return int(major) * 10000 + int(major2) * 100 23 | 24 | def get_version(connection): 25 | """ 26 | Returns an integer representing the major, minor and revision number of the 27 | server. Format is the one used for the return value of libpq 28 | PQServerVersion()/``server_version`` connection attribute (available in 29 | newer psycopg2 versions.) 30 | 31 | For example, 80304 for 8.3.4. The last two digits will be 00 in the case of 32 | releases (e.g., 80400 for 'PostgreSQL 8.4') or in the case of beta and 33 | prereleases (e.g. 90100 for 'PostgreSQL 9.1beta2'). 34 | 35 | PQServerVersion()/``server_version`` doesn't execute a query so try that 36 | first, then fallback to a ``SELECT version()`` query. 
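For example, _parse_version('PostgreSQL 8.4.1 on x86_64 ...') returns 80401. Amazon Redshift identifies itself as PostgreSQL 8.0.2, so against a Redshift cluster this is expected to return 80002.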
37 | """ 38 | if hasattr(connection, 'server_version'): 39 | return connection.server_version 40 | else: 41 | cursor = connection.cursor() 42 | cursor.execute("SELECT version()") 43 | return _parse_version(cursor.fetchone()[0]) 44 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name='django-redshift', 5 | version='0.2', 6 | description='Amazon Redshift Dialect for django', 7 | long_description=open("README.rst").read(), 8 | author='Matt George', 9 | author_email='mgeorge@gmail.com', 10 | license="MIT", 11 | url='https://github.com/binarydud/django-redshift', 12 | packages=['django-redshift'], 13 | install_requires=['psycopg2==2.5'], 14 | include_package_data=True, 15 | zip_safe=False, 16 | classifiers=[ 17 | "Development Status :: 3 - Alpha", 18 | "Environment :: Console", 19 | "Intended Audience :: Developers", 20 | "License :: OSI Approved :: MIT License", 21 | "Operating System :: OS Independent", 22 | "Programming Language :: Python", 23 | ] 24 | ) 25 | 26 | --------------------------------------------------------------------------------