root / env / lib / python2.7 / site-packages / south / db / generic.py @ d1a4905f
History | View | Annotate | Download (43.4 KB)
1 |
import re |
---|---|
2 |
import sys |
3 |
|
4 |
from django.core.management.color import no_style |
5 |
from django.db import transaction, models |
6 |
from django.db.utils import DatabaseError |
7 |
from django.db.backends.util import truncate_name |
8 |
from django.db.backends.creation import BaseDatabaseCreation |
9 |
from django.db.models.fields import NOT_PROVIDED |
10 |
from django.dispatch import dispatcher |
11 |
from django.conf import settings |
12 |
from django.utils.datastructures import SortedDict |
13 |
try:
    from django.utils.functional import cached_property
except ImportError:
    class cached_property(object):
        """
        Decorator that converts a method with a single self argument
        into a property cached on the instance.

        Fallback for Django versions that do not ship
        django.utils.functional.cached_property.
        """
        def __init__(self, func):
            self.func = func

        def __get__(self, instance, type):
            # Accessed on the class itself (instance is None): return the
            # descriptor, mirroring Django's implementation. The original
            # fallback crashed here with an AttributeError on None.
            if instance is None:
                return self
            # Compute once, then store the result directly in the instance
            # __dict__ so future lookups bypass the descriptor entirely.
            res = instance.__dict__[self.func.__name__] = self.func(instance)
            return res
28 |
from south.logger import get_logger |
29 |
|
30 |
|
31 |
def alias(attrname):
    """
    Build a method that delegates to the attribute named *attrname*.

    Used instead of plain ``foo = bar`` aliasing so that subclasses which
    override the target method are picked up by the alias as well.
    """
    def forwarder(self, *args, **kwds):
        target = getattr(self, attrname)
        return target(*args, **kwds)
    return forwarder
39 |
|
40 |
|
41 |
def invalidate_table_constraints(func):
    """
    Decorator for DatabaseOperations methods that change a table's schema:
    marks the cached constraint data for that table as INVALID before the
    wrapped operation runs, forcing a re-read on the next lookup.
    """
    def _cache_clear(self, table, *args, **opts):
        # Poison the cache entry first -- the wrapped DDL may alter constraints.
        self._set_cache(table, value=INVALID)
        return func(self, table, *args, **opts)
    return _cache_clear
46 |
|
47 |
|
48 |
def delete_column_constraints(func):
    """
    Decorator for column-removal operations: empties the cached constraint
    list for the dropped column before delegating to the wrapped method.
    """
    def _column_rm(self, table, column, *args, **opts):
        # A deleted column can no longer carry any constraints.
        self._set_cache(table, column, value=[])
        return func(self, table, column, *args, **opts)
    return _column_rm
53 |
|
54 |
|
55 |
def copy_column_constraints(func):
    """
    Decorator for column-rename operations: copies the cached constraints of
    the old column name onto the new name before delegating to the wrapped
    method, so the cache stays accurate across the rename.
    """
    def _column_cp(self, table, column_old, column_new, *args, **opts):
        db_name = self._get_setting('NAME')
        existing = self.lookup_constraint(db_name, table, column_old)
        self._set_cache(table, column_new, value=existing)
        return func(self, table, column_old, column_new, *args, **opts)
    return _column_cp
61 |
|
62 |
|
63 |
class INVALID(Exception):
    """
    Sentinel marking a constraint-cache entry as stale.

    The class object itself is stored in the cache as the marker, and it is
    also raised (and caught) inside lookup_constraint to trigger a refresh.
    """
    def __repr__(self):
        return 'INVALID'
66 |
|
67 |
|
68 |
class DryRunError(ValueError):
    """
    Raised when an operation requires live database introspection (e.g.
    looking up constraints) that cannot be performed during a dry run.
    """
    pass
70 |
|
71 |
|
72 |
class DatabaseOperations(object):
    """
    Generic SQL implementation of the DatabaseOperations.
    Some of this code comes from Django Evolution.
    """

    # SQL templates; backend subclasses override the ones whose dialect
    # differs. %(name)s templates are filled from a dict, %s from a tuple.
    # NOTE: the *_set_null/*_drop_null names are historical --
    # alter_string_set_null makes the column nullable (DROP NOT NULL) and
    # alter_string_drop_null makes it mandatory (SET NOT NULL).
    alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
    delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
    delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
    delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    max_index_name_length = 63
    drop_index_string = 'DROP INDEX %(index_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
    create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
    add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
    rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
    # Human-readable backend identifier; set by subclasses.
    backend_name = None
    default_schema_name = "public"

    # Features
    # Whether several ALTER clauses may be combined in one ALTER TABLE.
    allows_combined_alters = True
    supports_foreign_keys = True
    has_check_constraints = True
    # False on backends that store booleans as 0/1 integers.
    has_booleans = True
    @cached_property
    def has_ddl_transactions(self):
        """
        Tests the database using feature detection to see if it has
        transactional DDL support.

        Probes by creating a table inside a transaction and rolling back:
        if the rollback undid the CREATE, a second CREATE of the same table
        succeeds and DDL is transactional. Cached on the instance.
        """
        self._possibly_initialise()
        connection = self._get_connection()
        if hasattr(connection.features, "confirm") and not connection.features._confirmed:
            connection.features.confirm()
        # Django 1.3's MySQLdb backend doesn't raise DatabaseError
        exceptions = (DatabaseError, )
        try:
            from MySQLdb import OperationalError
            exceptions += (OperationalError, )
        except ImportError:
            pass
        # Now do the test
        if getattr(connection.features, 'supports_transactions', True):
            cursor = connection.cursor()
            self.start_transaction()
            cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
            self.rollback_transaction()
            try:
                try:
                    # Succeeds only if the rollback removed the first table,
                    # i.e. the CREATE TABLE above was transactional.
                    cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
                except exceptions:
                    return False
                else:
                    return True
            finally:
                # Always clean up the probe table, whichever CREATE made it.
                cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
        else:
            return False
136 |
def __init__(self, db_alias): |
137 |
self.debug = False |
138 |
self.deferred_sql = []
|
139 |
self.dry_run = False |
140 |
self.pending_transactions = 0 |
141 |
self.pending_create_signals = []
|
142 |
self.db_alias = db_alias
|
143 |
self._constraint_cache = {}
|
144 |
self._initialised = False |
145 |
|
146 |
    def lookup_constraint(self, db_name, table_name, column_name=None):
        """ return a set() of constraints for db_name.table_name.column_name """
        def _lookup():
            # Raises KeyError when the db/table/column is not cached at all.
            table = self._constraint_cache[db_name][table_name]
            if table is INVALID:
                # Entry was poisoned by a schema-changing operation.
                raise INVALID
            elif column_name is None:
                # Whole-table lookup: (column, constraints) pairs.
                return table.items()
            else:
                return table[column_name]

        try:
            ret = _lookup()
            return ret
        except INVALID:
            # Stale entry: throw it away and re-read from the database.
            del self._constraint_cache[db_name][table_name]
            self._fill_constraint_cache(db_name, table_name)
        except KeyError:
            if self._is_valid_cache(db_name, table_name):
                # Table is cached and valid but the column is unknown, so it
                # simply has no constraints.
                return []
            self._fill_constraint_cache(db_name, table_name)

        # The cache has just been (re)filled above; retry the lookup once.
        return self.lookup_constraint(db_name, table_name, column_name)
170 |
def _set_cache(self, table_name, column_name=None, value=INVALID): |
171 |
db_name = self._get_setting('NAME') |
172 |
try:
|
173 |
if column_name is not None: |
174 |
self._constraint_cache[db_name][table_name][column_name] = value
|
175 |
else:
|
176 |
self._constraint_cache[db_name][table_name] = value
|
177 |
except (LookupError, TypeError): |
178 |
pass
|
179 |
|
180 |
def _is_valid_cache(self, db_name, table_name): |
181 |
# we cache per-table so if the table is there it is valid
|
182 |
try:
|
183 |
return self._constraint_cache[db_name][table_name] is not INVALID |
184 |
except KeyError: |
185 |
return False |
186 |
|
187 |
def _is_multidb(self): |
188 |
try:
|
189 |
from django.db import connections |
190 |
connections # Prevents "unused import" warning
|
191 |
except ImportError: |
192 |
return False |
193 |
else:
|
194 |
return True |
195 |
|
196 |
def _get_connection(self): |
197 |
"""
|
198 |
Returns a django connection for a given DB Alias
|
199 |
"""
|
200 |
if self._is_multidb(): |
201 |
from django.db import connections |
202 |
return connections[self.db_alias] |
203 |
else:
|
204 |
from django.db import connection |
205 |
return connection
|
206 |
|
207 |
def _get_setting(self, setting_name): |
208 |
"""
|
209 |
Allows code to get a setting (like, for example, STORAGE_ENGINE)
|
210 |
"""
|
211 |
setting_name = setting_name.upper() |
212 |
connection = self._get_connection()
|
213 |
if self._is_multidb(): |
214 |
# Django 1.2 and above
|
215 |
return connection.settings_dict[setting_name]
|
216 |
else:
|
217 |
# Django 1.1 and below
|
218 |
return getattr(settings, "DATABASE_%s" % setting_name) |
219 |
|
220 |
def _has_setting(self, setting_name): |
221 |
"""
|
222 |
Existence-checking version of _get_setting.
|
223 |
"""
|
224 |
try:
|
225 |
self._get_setting(setting_name)
|
226 |
except (KeyError, AttributeError): |
227 |
return False |
228 |
else:
|
229 |
return True |
230 |
|
231 |
def _get_schema_name(self): |
232 |
try:
|
233 |
return self._get_setting('schema') |
234 |
except (KeyError, AttributeError): |
235 |
return self.default_schema_name |
236 |
|
237 |
def _possibly_initialise(self): |
238 |
if not self._initialised: |
239 |
self.connection_init()
|
240 |
self._initialised = True |
241 |
|
242 |
    def connection_init(self):
        """
        Run before any SQL to let database-specific config be sent as a command,
        e.g. which storage engine (MySQL) or transaction serialisability level.
        """
        # No-op in the generic backend; subclasses override as needed.
        pass
249 |
def quote_name(self, name): |
250 |
"""
|
251 |
Uses the database backend to quote the given table/column name.
|
252 |
"""
|
253 |
return self._get_connection().ops.quote_name(name) |
254 |
|
255 |
def execute(self, sql, params=[]): |
256 |
"""
|
257 |
Executes the given SQL statement, with optional parameters.
|
258 |
If the instance's debug attribute is True, prints out what it executes.
|
259 |
"""
|
260 |
|
261 |
self._possibly_initialise()
|
262 |
|
263 |
cursor = self._get_connection().cursor()
|
264 |
if self.debug: |
265 |
print " = %s" % sql, params |
266 |
|
267 |
if self.dry_run: |
268 |
return []
|
269 |
|
270 |
get_logger().debug('execute "%s" with params "%s"' % (sql, params))
|
271 |
|
272 |
try:
|
273 |
cursor.execute(sql, params) |
274 |
except DatabaseError, e:
|
275 |
print >> sys.stderr, 'FATAL ERROR - The following SQL query failed: %s' % sql |
276 |
print >> sys.stderr, 'The error was: %s' % e |
277 |
raise
|
278 |
|
279 |
try:
|
280 |
return cursor.fetchall()
|
281 |
except:
|
282 |
return []
|
283 |
|
284 |
    def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
        """
        Takes a SQL file and executes it as many separate statements.
        (Some backends, such as Postgres, don't work otherwise.)
        """
        # Be warned: This function is full of dark magic. Make sure you really
        # know regexes before trying to edit it.
        # First, strip comments
        # "%" is doubled so the %-interpolation inside execute() is a no-op.
        sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
        # Now execute each statement
        # re.split with a capturing group alternates separators and captures;
        # [1:][::2] selects the captured statement bodies between semicolons.
        for st in re.split(regex, sql)[1:][::2]:
            self.execute(st)
297 |
def add_deferred_sql(self, sql): |
298 |
"""
|
299 |
Add a SQL statement to the deferred list, that won't be executed until
|
300 |
this instance's execute_deferred_sql method is run.
|
301 |
"""
|
302 |
self.deferred_sql.append(sql)
|
303 |
|
304 |
def execute_deferred_sql(self): |
305 |
"""
|
306 |
Executes all deferred SQL, resetting the deferred_sql list
|
307 |
"""
|
308 |
for sql in self.deferred_sql: |
309 |
self.execute(sql)
|
310 |
|
311 |
self.deferred_sql = []
|
312 |
|
313 |
def clear_deferred_sql(self): |
314 |
"""
|
315 |
Resets the deferred_sql list to empty.
|
316 |
"""
|
317 |
self.deferred_sql = []
|
318 |
|
319 |
def clear_run_data(self, pending_creates = None): |
320 |
"""
|
321 |
Resets variables to how they should be before a run. Used for dry runs.
|
322 |
If you want, pass in an old panding_creates to reset to.
|
323 |
"""
|
324 |
self.clear_deferred_sql()
|
325 |
self.pending_create_signals = pending_creates or [] |
326 |
|
327 |
def get_pending_creates(self): |
328 |
return self.pending_create_signals |
329 |
|
330 |
@invalidate_table_constraints
|
331 |
def create_table(self, table_name, fields): |
332 |
"""
|
333 |
Creates the table 'table_name'. 'fields' is a tuple of fields,
|
334 |
each repsented by a 2-part tuple of field name and a
|
335 |
django.db.models.fields.Field object
|
336 |
"""
|
337 |
|
338 |
if len(table_name) > 63: |
339 |
print " ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL." |
340 |
|
341 |
# avoid default values in CREATE TABLE statements (#925)
|
342 |
for field_name, field in fields: |
343 |
field._suppress_default = True
|
344 |
|
345 |
columns = [ |
346 |
self.column_sql(table_name, field_name, field)
|
347 |
for field_name, field in fields |
348 |
] |
349 |
|
350 |
self.execute('CREATE TABLE %s (%s);' % ( |
351 |
self.quote_name(table_name),
|
352 |
', '.join([col for col in columns if col]), |
353 |
)) |
354 |
|
355 |
add_table = alias('create_table') # Alias for consistency's sake |
356 |
|
357 |
@invalidate_table_constraints
|
358 |
def rename_table(self, old_table_name, table_name): |
359 |
"""
|
360 |
Renames the table 'old_table_name' to 'table_name'.
|
361 |
"""
|
362 |
if old_table_name == table_name:
|
363 |
# Short-circuit out.
|
364 |
return
|
365 |
params = (self.quote_name(old_table_name), self.quote_name(table_name)) |
366 |
self.execute(self.rename_table_sql % params) |
367 |
# Invalidate the not-yet-indexed table
|
368 |
self._set_cache(table_name, value=INVALID)
|
369 |
|
370 |
@invalidate_table_constraints
|
371 |
def delete_table(self, table_name, cascade=True): |
372 |
"""
|
373 |
Deletes the table 'table_name'.
|
374 |
"""
|
375 |
params = (self.quote_name(table_name), )
|
376 |
if cascade:
|
377 |
self.execute('DROP TABLE %s CASCADE;' % params) |
378 |
else:
|
379 |
self.execute('DROP TABLE %s;' % params) |
380 |
|
381 |
drop_table = alias('delete_table')
|
382 |
|
383 |
@invalidate_table_constraints
|
384 |
def clear_table(self, table_name): |
385 |
"""
|
386 |
Deletes all rows from 'table_name'.
|
387 |
"""
|
388 |
params = (self.quote_name(table_name), )
|
389 |
self.execute('DELETE FROM %s;' % params) |
390 |
|
391 |
@invalidate_table_constraints
|
392 |
def add_column(self, table_name, name, field, keep_default=True): |
393 |
"""
|
394 |
Adds the column 'name' to the table 'table_name'.
|
395 |
Uses the 'field' paramater, a django.db.models.fields.Field instance,
|
396 |
to generate the necessary sql
|
397 |
|
398 |
@param table_name: The name of the table to add the column to
|
399 |
@param name: The name of the column to add
|
400 |
@param field: The field to use
|
401 |
"""
|
402 |
sql = self.column_sql(table_name, name, field)
|
403 |
if sql:
|
404 |
params = ( |
405 |
self.quote_name(table_name),
|
406 |
sql, |
407 |
) |
408 |
sql = self.add_column_string % params
|
409 |
self.execute(sql)
|
410 |
|
411 |
# Now, drop the default if we need to
|
412 |
if not keep_default and field.default is not None: |
413 |
field.default = NOT_PROVIDED |
414 |
self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True) |
415 |
|
416 |
def _db_type_for_alter_column(self, field): |
417 |
"""
|
418 |
Returns a field's type suitable for ALTER COLUMN.
|
419 |
By default it just returns field.db_type().
|
420 |
To be overriden by backend specific subclasses
|
421 |
@param field: The field to generate type for
|
422 |
"""
|
423 |
try:
|
424 |
return field.db_type(connection=self._get_connection()) |
425 |
except TypeError: |
426 |
return field.db_type()
|
427 |
|
428 |
    def _alter_add_column_mods(self, field, name, params, sqls):
        """
        Subcommand of alter_column that modifies column definitions beyond
        the type string -- e.g. adding constraints where they cannot be specified
        as part of the type (overrideable)
        """
        # Generic backend: no-op. Subclasses append extra
        # (sql_fragment, values) pairs to `sqls` in place.
        pass
436 |
def _alter_set_defaults(self, field, name, params, sqls): |
437 |
"Subcommand of alter_column that sets default values (overrideable)"
|
438 |
# Next, set any default
|
439 |
if not field.null and field.has_default(): |
440 |
default = field.get_default() |
441 |
sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default])) |
442 |
else:
|
443 |
sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), [])) |
444 |
|
445 |
    @invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behavour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        @param ignore_constraints: If True, skip dropping/re-adding CHECK and
                                   FOREIGN KEY constraints around the ALTER.
        """

        if self.dry_run:
            if self.debug:
                print ' - no dry run output for alter_column() due to dynamic DDL, sorry'
            return

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        else:
            field.column = name

        if not ignore_constraints:
            # Drop all check constraints. Note that constraints will be added back
            # with self.alter_string_set_type and self.alter_string_drop_null.
            if self.has_check_constraints:
                check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
                for constraint in check_constraints:
                    self.execute(self.delete_check_sql % {
                        'table': self.quote_name(table_name),
                        'constraint': self.quote_name(constraint),
                    })

            # Drop all foreign key constraints
            try:
                self.delete_foreign_key(table_name, name)
            except ValueError:
                # There weren't any
                pass

        # First, change the type
        params = {
            "column": self.quote_name(name),
            "type": self._db_type_for_alter_column(field),
            "table_name": table_name
        }

        # SQLs is a list of (SQL, values) pairs.
        sqls = []

        # Only alter the column if it has a type (Geometry ones sometimes don't)
        if params["type"] is not None:
            sqls.append((self.alter_string_set_type % params, []))

        # Add any field- and backend- specific modifications
        self._alter_add_column_mods(field, name, params, sqls)
        # Next, nullity
        if field.null:
            sqls.append((self.alter_string_set_null % params, []))
        else:
            sqls.append((self.alter_string_drop_null % params, []))

        # Next, set any default
        self._alter_set_defaults(field, name, params, sqls)

        # Finally, actually change the column
        if self.allows_combined_alters:
            # NOTE(review): flatten() appears to be a helper defined elsewhere
            # in this module -- confirm it flattens the values lists.
            sqls, values = zip(*sqls)
            self.execute(
                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
                flatten(values),
            )
        else:
            # Databases like e.g. MySQL don't like more than one alter at once.
            for sql, values in sqls:
                self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)

        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
541 |
    def _fill_constraint_cache(self, db_name, table_name):
        """
        Populate the constraint cache for one table by querying
        information_schema (both constraint_column_usage and
        key_column_usage, to cover all constraint kinds).
        """

        schema = self._get_schema_name()
        ifsc_tables = ["constraint_column_usage", "key_column_usage"]

        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        for ifsc_table in ifsc_tables:
            # %s is the information_schema table name (interpolated here);
            # %%s become DB-API placeholders for schema and table name.
            rows = self.execute("""
                SELECT kc.constraint_name, kc.column_name, c.constraint_type
                FROM information_schema.%s AS kc
                JOIN information_schema.table_constraints AS c ON
                    kc.table_schema = c.table_schema AND
                    kc.table_name = c.table_name AND
                    kc.constraint_name = c.constraint_name
                WHERE
                    kc.table_schema = %%s AND
                    kc.table_name = %%s
            """ % ifsc_table, [schema, table_name])
            for constraint, column, kind in rows:
                # Cache shape: {column: set((kind, constraint_name))}.
                self._constraint_cache[db_name][table_name].setdefault(column, set())
                self._constraint_cache[db_name][table_name][column].add((kind, constraint))
        return
566 |
def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"): |
567 |
"""
|
568 |
Gets the names of the constraints affecting the given columns.
|
569 |
If columns is None, returns all constraints of the type on the table.
|
570 |
"""
|
571 |
if self.dry_run: |
572 |
raise DryRunError("Cannot get constraints for columns.") |
573 |
|
574 |
if columns is not None: |
575 |
columns = set(map(lambda s: s.lower(), columns)) |
576 |
|
577 |
db_name = self._get_setting('NAME') |
578 |
|
579 |
cnames = {} |
580 |
for col, constraints in self.lookup_constraint(db_name, table_name): |
581 |
for kind, cname in constraints: |
582 |
if kind == type: |
583 |
cnames.setdefault(cname, set())
|
584 |
cnames[cname].add(col.lower()) |
585 |
|
586 |
for cname, cols in cnames.items(): |
587 |
if cols == columns or columns is None: |
588 |
yield cname
|
589 |
|
590 |
@invalidate_table_constraints
|
591 |
def create_unique(self, table_name, columns): |
592 |
"""
|
593 |
Creates a UNIQUE constraint on the columns on the given table.
|
594 |
"""
|
595 |
|
596 |
if not isinstance(columns, (list, tuple)): |
597 |
columns = [columns] |
598 |
|
599 |
name = self.create_index_name(table_name, columns, suffix="_uniq") |
600 |
|
601 |
cols = ", ".join(map(self.quote_name, columns)) |
602 |
self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % ( |
603 |
self.quote_name(table_name),
|
604 |
self.quote_name(name),
|
605 |
cols, |
606 |
)) |
607 |
return name
|
608 |
|
609 |
@invalidate_table_constraints
|
610 |
def delete_unique(self, table_name, columns): |
611 |
"""
|
612 |
Deletes a UNIQUE constraint on precisely the columns on the given table.
|
613 |
"""
|
614 |
|
615 |
if not isinstance(columns, (list, tuple)): |
616 |
columns = [columns] |
617 |
|
618 |
# Dry runs mean we can't do anything.
|
619 |
if self.dry_run: |
620 |
if self.debug: |
621 |
print ' - no dry run output for delete_unique_column() due to dynamic DDL, sorry' |
622 |
return
|
623 |
|
624 |
constraints = list(self._constraints_affecting_columns(table_name, columns)) |
625 |
if not constraints: |
626 |
raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns)) |
627 |
for constraint in constraints: |
628 |
self.execute(self.delete_unique_sql % ( |
629 |
self.quote_name(table_name),
|
630 |
self.quote_name(constraint),
|
631 |
)) |
632 |
|
633 |
    def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
        """
        Creates the SQL snippet for a column. Used by add_column and add_table.

        Returns None when the field has no database type. As a side effect,
        queues deferred SQL for foreign keys, post_create_sql and indexes.
        """

        # If the field hasn't already been told its attribute name, do so.
        if not field_prepared:
            field.set_attributes_from_name(field_name)

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()

        # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
        field = self._field_sanity(field)

        try:
            # Django 1.2+ signature; TypeError falls back to the 1.1 one.
            sql = field.db_type(connection=self._get_connection())
        except TypeError:
            sql = field.db_type()

        if sql:

            # Some callers, like the sqlite stuff, just want the extended type.
            if with_name:
                field_output = [self.quote_name(field.column), sql]
            else:
                field_output = [sql]

            field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
            if field.primary_key:
                field_output.append('PRIMARY KEY')
            elif field.unique:
                # Just use UNIQUE (no indexes any more, we have delete_unique)
                field_output.append('UNIQUE')

            tablespace = field.db_tablespace or tablespace
            if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
                # We must specify the index tablespace inline, because we
                # won't be generating a CREATE INDEX statement for this field.
                field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))

            sql = ' '.join(field_output)
            sqlparams = ()
            # if the field is "NOT NULL" and a default value is provided, create the column with it
            # this allows the addition of a NOT NULL field to a table with existing rows
            if not getattr(field, '_suppress_default', False):
                if field.has_default():
                    default = field.get_default()
                    # If the default is actually None, don't add a default term
                    if default is not None:
                        # If the default is a callable, then call it!
                        if callable(default):
                            default = default()

                        default = field.get_db_prep_save(default, connection=self._get_connection())
                        default = self._default_value_workaround(default)
                        # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                        if isinstance(default, basestring):
                            default = "'%s'" % default.replace("'", "''")
                        # Escape any % signs in the output (bug #317)
                        if isinstance(default, basestring):
                            default = default.replace("%", "%%")
                        # Add it in
                        sql += " DEFAULT %s"
                        # Note: (default) is NOT a tuple -- a single value is
                        # fine for the single %s substitution at the end.
                        sqlparams = (default)
                elif (not field.null and field.blank) or (field.get_default() == ''):
                    if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
                        sql += " DEFAULT ''"
                    # Error here would be nice, but doesn't seem to play fair.
                    #else:
                    #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")

            if field.rel and self.supports_foreign_keys:
                # FK constraint is deferred so the referenced table can be
                # created later in the same run.
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )

        # Things like the contrib.gis module fields have this in 1.1 and below
        if hasattr(field, 'post_create_sql'):
            for stmt in field.post_create_sql(no_style(), table_name):
                self.add_deferred_sql(stmt)

        # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
        # This also creates normal indexes in 1.1.
        if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
            # Make a fake model to pass in, with only db_table
            # NOTE(review): mock_model appears to be defined elsewhere in this
            # module -- confirm it builds a stand-in model object.
            model = self.mock_model("FakeModelForGISCreation", table_name)
            for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                self.add_deferred_sql(stmt)

        if sql:
            return sql % sqlparams
        else:
            return None
734 |
    def _field_sanity(self, field):
        """
        Placeholder for DBMS-specific field alterations (some combos aren't valid,
        e.g. DEFAULT and TEXT on MySQL)
        """
        # Generic backend: fields pass through unchanged.
        return field
741 |
def _default_value_workaround(self, value): |
742 |
"""
|
743 |
DBMS-specific value alterations (this really works around
|
744 |
missing functionality in Django backends)
|
745 |
"""
|
746 |
if isinstance(value, bool) and not self.has_booleans: |
747 |
return int(value) |
748 |
else:
|
749 |
return value
|
750 |
|
751 |
def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name): |
752 |
"""
|
753 |
Generates a full SQL statement to add a foreign key constraint
|
754 |
"""
|
755 |
constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name)))) |
756 |
return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % ( |
757 |
self.quote_name(from_table_name),
|
758 |
self.quote_name(truncate_name(constraint_name, self._get_connection().ops.max_name_length())), |
759 |
self.quote_name(from_column_name),
|
760 |
self.quote_name(to_table_name),
|
761 |
self.quote_name(to_column_name),
|
762 |
self._get_connection().ops.deferrable_sql() # Django knows this |
763 |
) |
764 |
|
765 |
@invalidate_table_constraints
|
766 |
def delete_foreign_key(self, table_name, column): |
767 |
"""
|
768 |
Drop a foreign key constraint
|
769 |
"""
|
770 |
if self.dry_run: |
771 |
if self.debug: |
772 |
print ' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry' |
773 |
return # We can't look at the DB to get the constraints |
774 |
constraints = self._find_foreign_constraints(table_name, column)
|
775 |
if not constraints: |
776 |
raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column)) |
777 |
for constraint_name in constraints: |
778 |
self.execute(self.delete_foreign_key_sql % { |
779 |
"table": self.quote_name(table_name), |
780 |
"constraint": self.quote_name(constraint_name), |
781 |
}) |
782 |
|
783 |
drop_foreign_key = alias('delete_foreign_key')
|
784 |
|
785 |
def _find_foreign_constraints(self, table_name, column_name=None): |
786 |
constraints = self._constraints_affecting_columns(
|
787 |
table_name, [column_name], "FOREIGN KEY")
|
788 |
|
789 |
primary_key_columns = self._find_primary_key_columns(table_name)
|
790 |
|
791 |
if len(primary_key_columns) > 1: |
792 |
# Composite primary keys cannot be referenced by a foreign key
|
793 |
return list(constraints) |
794 |
else:
|
795 |
primary_key_columns.add(column_name) |
796 |
recursive_constraints = set(self._constraints_affecting_columns( |
797 |
table_name, primary_key_columns, "FOREIGN KEY"))
|
798 |
return list(recursive_constraints.union(constraints)) |
799 |
|
800 |
def _digest(self, *args): |
801 |
"""
|
802 |
Use django.db.backends.creation.BaseDatabaseCreation._digest
|
803 |
to create index name in Django style. An evil hack :(
|
804 |
"""
|
805 |
if not hasattr(self, '_django_db_creation'): |
806 |
self._django_db_creation = BaseDatabaseCreation(self._get_connection()) |
807 |
return self._django_db_creation._digest(*args) |
808 |
|
809 |
def create_index_name(self, table_name, column_names, suffix=""): |
810 |
"""
|
811 |
Generate a unique name for the index
|
812 |
"""
|
813 |
|
814 |
# If there is just one column in the index, use a default algorithm from Django
|
815 |
if len(column_names) == 1 and not suffix: |
816 |
return truncate_name(
|
817 |
'%s_%s' % (table_name, self._digest(column_names[0])), |
818 |
self._get_connection().ops.max_name_length()
|
819 |
) |
820 |
|
821 |
# Else generate the name for the index by South
|
822 |
table_name = table_name.replace('"', '').replace('.', '_') |
823 |
index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) |
824 |
|
825 |
# If the index name is too long, truncate it
|
826 |
index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_') |
827 |
if len(index_name) > self.max_index_name_length: |
828 |
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) |
829 |
index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part) |
830 |
|
831 |
return index_name
|
832 |
|
833 |
def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''): |
834 |
"""
|
835 |
Generates a create index statement on 'table_name' for a list of 'column_names'
|
836 |
"""
|
837 |
if not column_names: |
838 |
print "No column names supplied on which to create an index" |
839 |
return '' |
840 |
|
841 |
connection = self._get_connection()
|
842 |
if db_tablespace and connection.features.supports_tablespaces: |
843 |
tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
|
844 |
else:
|
845 |
tablespace_sql = ''
|
846 |
|
847 |
index_name = self.create_index_name(table_name, column_names)
|
848 |
return 'CREATE %sINDEX %s ON %s (%s)%s;' % ( |
849 |
unique and 'UNIQUE ' or '', |
850 |
self.quote_name(index_name),
|
851 |
self.quote_name(table_name),
|
852 |
','.join([self.quote_name(field) for field in column_names]), |
853 |
tablespace_sql |
854 |
) |
855 |
|
856 |
@invalidate_table_constraints
def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
    """Build and immediately execute a CREATE INDEX statement."""
    self.execute(self.create_index_sql(table_name, column_names, unique, db_tablespace))
|
861 |
|
862 |
@invalidate_table_constraints
def delete_index(self, table_name, column_names, db_tablespace=''):
    """
    Drop the index that create_index() would have created for these
    columns. Works from column names alone because create_index_name()
    is deterministic.
    """
    # Accept a single column name as well as a list of them.
    if isinstance(column_names, (str, unicode)):
        column_names = [column_names]
    index_name = self.create_index_name(table_name, column_names)
    self.execute(self.drop_index_string % {
        "index_name": self.quote_name(index_name),
        "table_name": self.quote_name(table_name),
    })

drop_index = alias('delete_index')
|
879 |
|
880 |
@delete_column_constraints
def delete_column(self, table_name, name):
    """
    Drop the column `name` from the table `table_name`.
    """
    quote = self.quote_name
    self.execute(self.delete_column_string % (quote(table_name), quote(name)), [])

drop_column = alias('delete_column')
|
889 |
|
890 |
def rename_column(self, table_name, old, new):
    """
    Rename the column `old` of table `table_name` to `new`.

    There is no portable SQL for column renames, so every backend
    must provide its own implementation.
    """
    raise NotImplementedError("rename_column has no generic SQL syntax")
895 |
|
896 |
@invalidate_table_constraints
|
897 |
def delete_primary_key(self, table_name): |
898 |
"""
|
899 |
Drops the old primary key.
|
900 |
"""
|
901 |
# Dry runs mean we can't do anything.
|
902 |
if self.dry_run: |
903 |
if self.debug: |
904 |
print ' - no dry run output for delete_primary_key() due to dynamic DDL, sorry' |
905 |
return
|
906 |
|
907 |
constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY")) |
908 |
if not constraints: |
909 |
raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,)) |
910 |
|
911 |
for constraint in constraints: |
912 |
self.execute(self.delete_primary_key_sql % { |
913 |
"table": self.quote_name(table_name), |
914 |
"constraint": self.quote_name(constraint), |
915 |
}) |
916 |
|
917 |
drop_primary_key = alias('delete_primary_key')
|
918 |
|
919 |
@invalidate_table_constraints
def create_primary_key(self, table_name, columns):
    """
    Create a primary key named <table>_pkey on `columns`, which may be
    a single column name or a list/tuple of names.
    """
    if not isinstance(columns, (list, tuple)):
        columns = [columns]
    quoted_columns = [self.quote_name(column) for column in columns]
    self.execute(self.create_primary_key_string % {
        "table": self.quote_name(table_name),
        "constraint": self.quote_name(table_name + "_pkey"),
        "columns": ", ".join(quoted_columns),
    })
931 |
|
932 |
def _find_primary_key_columns(self, table_name): |
933 |
"""
|
934 |
Find all columns of the primary key of the specified table
|
935 |
"""
|
936 |
db_name = self._get_setting('NAME') |
937 |
|
938 |
primary_key_columns = set()
|
939 |
for col, constraints in self.lookup_constraint(db_name, table_name): |
940 |
for kind, cname in constraints: |
941 |
if kind == 'PRIMARY KEY': |
942 |
primary_key_columns.add(col.lower()) |
943 |
|
944 |
return primary_key_columns
|
945 |
|
946 |
def start_transaction(self):
    """
    Makes sure the following commands are inside a transaction.
    Must be followed by a (commit|rollback)_transaction call.
    """
    if self.dry_run:
        # Count nesting during dry runs so rollback_transactions_dry_run()
        # knows how many transactions to unwind.
        self.pending_transactions += 1
    # Flush any outstanding non-managed work, then put this alias into
    # managed transaction mode (Django 1.x transaction API).
    transaction.commit_unless_managed(using=self.db_alias)
    transaction.enter_transaction_management(using=self.db_alias)
    transaction.managed(True, using=self.db_alias)
956 |
|
957 |
def commit_transaction(self):
    """
    Commits the current transaction.
    Must be preceded by a start_transaction call.
    """
    if self.dry_run:
        # Dry-run work must never be committed; note that
        # rollback_transaction() intentionally has no such early return.
        return
    transaction.commit(using=self.db_alias)
    transaction.leave_transaction_management(using=self.db_alias)
|
966 |
|
967 |
def rollback_transaction(self):
    """
    Rolls back the current transaction.
    Must be preceded by a start_transaction call.
    """
    if self.dry_run:
        # Unlike commit_transaction(), no early return here: dry-run
        # transactions are rolled back for real; only the counter differs.
        self.pending_transactions -= 1
    transaction.rollback(using=self.db_alias)
    transaction.leave_transaction_management(using=self.db_alias)
|
976 |
|
977 |
def rollback_transactions_dry_run(self):
    """
    Rolls back all pending_transactions during this dry run.
    """
    if not self.dry_run:
        return
    # Unwind every transaction start_transaction() counted during the dry run.
    while self.pending_transactions > 0:
        self.rollback_transaction()
    if transaction.is_dirty(using=self.db_alias):
        # Force an exception, if we're still in a dirty transaction.
        # This means we are missing a COMMIT/ROLLBACK.
        transaction.leave_transaction_management(using=self.db_alias)
|
989 |
|
990 |
def send_create_signal(self, app_label, model_names):
    """Queue a post_syncdb signal for these models, to be emitted later
    by send_pending_create_signals()."""
    self.pending_create_signals.append((app_label, model_names))
|
992 |
|
993 |
def send_pending_create_signals(self, verbosity=0, interactive=False):
    """
    Flush the queue built by send_create_signal(), emitting at most one
    post_syncdb signal per app (with duplicate model names removed).
    """
    # Group queued model names by app_label, preserving app order.
    grouped = SortedDict()
    for app_label, model_names in self.pending_create_signals:
        grouped.setdefault(app_label, []).extend(model_names)

    # Send only one signal per app.
    for app_label, model_names in grouped.iteritems():
        self.really_send_create_signal(app_label, list(set(model_names)),
                                       verbosity=verbosity,
                                       interactive=interactive)
    self.pending_create_signals = []
|
1007 |
|
1008 |
def really_send_create_signal(self, app_label, model_names,
                              verbosity=0, interactive=False):
    """
    Sends a post_syncdb signal for the model specified.

    If the model is not found (perhaps it's been deleted?),
    no signal is sent.

    TODO: The behavior of django.contrib.* apps seems flawed in that
    they don't respect created_models. Rather, they blindly execute
    over all models within the app sending the signal. This is a
    patch we should push Django to make For now, this should work.
    """

    if self.debug:
        print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names)

    # Resolve the app; silently skip if it no longer exists.
    app = models.get_app(app_label)
    if not app:
        return

    # Only models that still resolve are reported as created.
    created_models = []
    for model_name in model_names:
        model = models.get_model(app_label, model_name)
        if model:
            created_models.append(model)

    if created_models:

        if hasattr(dispatcher, "send"):
            # Older djangos
            dispatcher.send(signal=models.signals.post_syncdb, sender=app,
                            app=app, created_models=created_models,
                            verbosity=verbosity, interactive=interactive)
        else:
            if self._is_multidb():
                # Django 1.2+: include the database alias in the signal.
                models.signals.post_syncdb.send(
                    sender=app,
                    app=app,
                    created_models=created_models,
                    verbosity=verbosity,
                    interactive=interactive,
                    db=self.db_alias,
                )
            else:
                # Django 1.1 - 1.0
                models.signals.post_syncdb.send(
                    sender=app,
                    app=app,
                    created_models=created_models,
                    verbosity=verbosity,
                    interactive=interactive,
                )
1062 |
|
1063 |
def mock_model(self, model_name, db_table, db_tablespace='',
               pk_field_name='id', pk_field_type=models.AutoField,
               pk_field_args=None, pk_field_kwargs=None):
    """
    Generates a MockModel class that provides enough information
    to be used by a foreign key/many-to-many relationship.

    Migrations should prefer to use these rather than actual models
    as models could get deleted over time, but these can remain in
    migration files forever.

    Deprecated.

    @param model_name: Name of the model class to fake
    @param db_table: Table name of the faked model
    @param pk_field_name: Name of the primary key field
    @param pk_field_type: Field class used for the primary key
    @param pk_field_args: Positional args for the pk field (optional)
    @param pk_field_kwargs: Keyword args for the pk field (optional)
    """
    # BUG FIX: these parameters previously defaulted to a shared mutable
    # []/{} and the dict was then mutated below (primary_key=True),
    # leaking state between calls and into caller-supplied dicts.
    # Use None sentinels and work on copies instead.
    pk_field_args = list(pk_field_args) if pk_field_args is not None else []
    pk_field_kwargs = dict(pk_field_kwargs) if pk_field_kwargs is not None else {}

    class MockOptions(object):
        def __init__(self):
            self.db_table = db_table
            self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
            self.object_name = model_name
            self.module_name = model_name.lower()

            if pk_field_type == models.AutoField:
                # AutoFields must always be primary keys in Django.
                pk_field_kwargs['primary_key'] = True

            self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
            self.pk.set_attributes_from_name(pk_field_name)
            self.abstract = False

        def get_field_by_name(self, field_name):
            # we only care about the pk field
            return (self.pk, self.model, True, False)

        def get_field(self, name):
            # we only care about the pk field
            return self.pk

    class MockModel(object):
        _meta = None

    # We need to return an actual class object here, not an instance
    MockModel._meta = MockOptions()
    MockModel._meta.model = MockModel
    return MockModel
|
1105 |
|
1106 |
def _db_positive_type_for_alter_column(self, klass, field):
    """
    A helper for subclasses overriding _db_type_for_alter_column:
    strips the positivity CHECK clause from the column type of
    PositiveInteger and PositiveSmallInteger fields, leaving only
    the base type.
    @param klass: The type of the child (required to allow this to be used when it is subclassed)
    @param field: The field to generate type for
    """
    db_type = super(klass, self)._db_type_for_alter_column(field)
    positive_types = (models.PositiveSmallIntegerField, models.PositiveIntegerField)
    if isinstance(field, positive_types):
        # Keep only the leading type token, dropping the CHECK part.
        db_type = db_type.split(" ", 1)[0]
    return db_type
|
1118 |
|
1119 |
def _alter_add_positive_check(self, klass, field, name, params, sqls):
    """
    A helper for subclasses overriding _alter_add_column_mods:
    appends a CHECK (col >= 0) constraint for PositiveInteger and
    PositiveSmallInteger fields.
    """
    super(klass, self)._alter_add_column_mods(field, name, params, sqls)
    if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
        # Hash the column params so the constraint name is unique per spec.
        uniq_hash = abs(hash(tuple(params.values())))
        fragment = self.add_check_constraint_fragment % {
            'constraint': "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
            'check': "%s >= 0" % self.quote_name(name),
        }
        sqls.append((fragment, []))
|
1132 |
|
1133 |
|
1134 |
# Single-level flattening of lists
|
1135 |
def flatten(ls):
    """Flatten one level of nesting: [[a, b], [c]] -> [a, b, c]."""
    return [item for sub in ls for item in sub]
|