root / env / lib / python2.7 / site-packages / south / db / sqlite3.py @ d1a4905f


from south.db import generic


class DatabaseOperations(generic.DatabaseOperations):

    """
    SQLite3 implementation of database operations.
    """

    backend_name = "sqlite3"

    # SQLite ignores several constraints. I wish I could.
    supports_foreign_keys = False
    has_check_constraints = False
    has_booleans = False
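
    # (These flags tell the generic DatabaseOperations what to skip on SQLite;
    # with supports_foreign_keys = False, for instance, no foreign key
    # constraint SQL should be generated for this backend. Annotation added
    # for clarity; not part of the original module.)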

    def add_column(self, table_name, name, field, *args, **kwds):
        """
        Adds a column.
        """
        # If it's not nullable, and has no default, raise an error (SQLite is picky)
        if (not field.null and
           (not field.has_default() or field.get_default() is None) and
           not field.empty_strings_allowed):
            raise ValueError("You cannot add a null=False column without a default value.")
        # Initialise the field.
        field.set_attributes_from_name(name)
        # We add columns by remaking the table; even though SQLite supports
        # adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols.
        self._remake_table(table_name, added={
            field.column: self._column_sql_for_create(table_name, name, field, False),
        })

    def _get_full_table_description(self, connection, cursor, table_name):
        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
        # cid, name, type, notnull, dflt_value, pk
        return [{'name': field[1],
                 'type': field[2],
                 'null_ok': not field[3],
                 'dflt_value': field[4],
                 'pk': field[5]     # undocumented
                 } for field in cursor.fetchall()]

    @generic.invalidate_table_constraints
    def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
        """
        Given a table and sets of changes (added, renamed, deleted and altered
        columns, plus optional primary key and unique-constraint overrides),
        recreates it with the modified schema.
        """
        # Dry runs get skipped completely
        if self.dry_run:
            return
        # Temporary table's name
        temp_name = "_south_new_" + table_name
        # Work out the (possibly new) definitions of each column
        definitions = {}
        cursor = self._get_connection().cursor()
        # Get the index descriptions
        indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
        multi_indexes = self._get_multi_indexes(table_name)
        # Work out new column defs.
        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
            name = column_info['name']
            if name in deleted:
                continue
            # Get the type, ignoring PRIMARY KEY (we need to be consistent)
            type = column_info['type'].replace("PRIMARY KEY", "")
            # Add on primary key, not null or unique if needed.
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            elif not column_info['null_ok']:
                type += " NOT NULL"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            if column_info['dflt_value'] is not None:
                type += " DEFAULT " + column_info['dflt_value']
            # Deal with a rename
            if name in renames:
                name = renames[name]
            # Add to the defs
            definitions[name] = type
        # Add on altered columns
        for name, type in altered.items():
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            definitions[name] = type
        # Add on the new columns
        for name, type in added.items():
            if (primary_key_override and primary_key_override == name):
                type += " PRIMARY KEY"
            definitions[name] = type
        # Alright, make the table
        self.execute("CREATE TABLE %s (%s)" % (
            self.quote_name(temp_name),
            ", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]),
        ))
        # Copy over the data
        self._copy_data(table_name, temp_name, renames)
        # Delete the old table, move our new one over it
        self.delete_table(table_name)
        self.rename_table(temp_name, table_name)
        # Recreate multi-valued indexes
        # We can't do that before, since it's impossible to rename indexes
        # and index name scope is global
        self._make_multi_indexes(table_name, multi_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
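
    # Rough sketch of what _remake_table() ends up executing for, say, a rename
    # of "old_col" to "new_col" on a hypothetical table "mytable" (illustrative
    # only; real names, quoting and types come from introspection):
    #
    #   CREATE TABLE "_south_new_mytable" (..., "new_col" <type>, ...);
    #   INSERT INTO "_south_new_mytable" (..., "new_col", ...)
    #       SELECT ..., "old_col", ... FROM "mytable";
    #   DROP TABLE "mytable";
    #   ALTER TABLE "_south_new_mytable" RENAME TO "mytable";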

    def _copy_data(self, src, dst, field_renames={}):
        "Used to copy data into a new table"
        # Make a list of all the fields to select
        cursor = self._get_connection().cursor()
        src_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, src)]
        dst_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, dst)]
        src_fields_new = []
        dst_fields_new = []
        for field in src_fields:
            if field in field_renames:
                dst_fields_new.append(self.quote_name(field_renames[field]))
            elif field in dst_fields:
                dst_fields_new.append(self.quote_name(field))
            else:
                continue
            src_fields_new.append(self.quote_name(field))
        # Copy over the data
        self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
            self.quote_name(dst),
            ', '.join(dst_fields_new),
            ', '.join(src_fields_new),
            self.quote_name(src),
        ))

    def _create_unique(self, table_name, columns):
        self.execute("CREATE UNIQUE INDEX %s ON %s(%s);" % (
            self.quote_name('%s_%s' % (table_name, '__'.join(columns))),
            self.quote_name(table_name),
            ', '.join(self.quote_name(c) for c in columns),
        ))
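
    # The generated index name has the form "<table>_<col1>__<col2>...". SQLite
    # index names share a single database-wide namespace, which is why
    # _remake_table() drops and recreates these indexes rather than renaming
    # them. (Annotation added for clarity; not part of the original module.)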

    def _get_multi_indexes(self, table_name):
        indexes = []
        cursor = self._get_connection().cursor()
        cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name))
        # seq, name, unique
        for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
            if not unique:
                continue
            cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index))
            info = cursor.fetchall()
            if len(info) == 1:
                continue
            columns = []
            for field in info:
                columns.append(field[2])
            indexes.append(columns)
        return indexes
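
    # Single-column unique indexes are skipped above; _remake_table() restores
    # those from the per-column introspection data (the " UNIQUE" suffix), so
    # only multi-column unique indexes need to be captured and rebuilt here.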

    def _make_multi_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
        for index in indexes:
            columns = []

            for name in index:
                # Handle deletion
                if name in deleted:
                    columns = []
                    break

                # Handle renames
                if name in renames:
                    name = renames[name]
                columns.append(name)

            if columns and columns != uniques_deleted:
                self._create_unique(table_name, columns)

    def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
        "Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.db_column
        else:
            field.column = name
        sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
        # Remove keywords we don't want (this should be type only, not constraint)
        if sql:
            sql = sql.replace("PRIMARY KEY", "")
        return sql

    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Changes a column's SQL definition.

        Note that this sqlite3 implementation ignores the ignore_constraints argument.
        The argument is accepted for API compatibility with the generic
        DatabaseOperations.alter_column() method.
        """
        # Remake the table correctly
        self._remake_table(table_name, altered={
            name: self._column_sql_for_create(table_name, name, field, explicit_name),
        })

    def delete_column(self, table_name, column_name):
        """
        Deletes a column.
        """
        self._remake_table(table_name, deleted=[column_name])

    def rename_column(self, table_name, old, new):
        """
        Renames a column from one name to another.
        """
        self._remake_table(table_name, renames={old: new})

    def create_unique(self, table_name, columns):
        """
        Creates a unique index on the given columns.
        """
        self._create_unique(table_name, columns)

    def delete_unique(self, table_name, columns):
        """
        Deletes a unique index.
        """
        self._remake_table(table_name, uniques_deleted=columns)

    def create_primary_key(self, table_name, columns):
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        assert len(columns) == 1, "SQLite backend does not support multi-column primary keys"
        self._remake_table(table_name, primary_key_override=columns[0])

    # Not fully implemented yet.
    def delete_primary_key(self, table_name):
        # By passing True in, we make sure we wipe all existing PKs.
        self._remake_table(table_name, primary_key_override=True)
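        # (With primary_key_override=True, no column name ever compares equal
        # to the override, so the rebuilt table simply ends up with no PRIMARY
        # KEY clause at all.)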

    # No cascades on deletes
    def delete_table(self, table_name, cascade=True):
        generic.DatabaseOperations.delete_table(self, table_name, False)

    def _default_value_workaround(self, default):
        if default == True:
            default = 1
        elif default == False:
            default = 0
        return default
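
# A minimal usage sketch (not part of this module; table and column names are
# hypothetical): inside a South migration's forwards(), these operations are
# reached through the "db" object, e.g.
#
#     db.add_column('app_book', 'subtitle',
#                   models.CharField(max_length=100, default=''))
#     db.rename_column('app_book', 'subtitle', 'sub_title')
#     db.delete_column('app_book', 'sub_title')
#
# On SQLite each of these calls rebuilds the whole table via _remake_table()
# rather than altering the column in place as other backends do.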