1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
|
# sqlite.py
# Copyright (C) 2005, 2006, 2007 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import sys, StringIO, string, types, re
from sqlalchemy import sql, engine, schema, ansisql, exceptions, pool, PassiveDefault
import sqlalchemy.engine.default as default
import sqlalchemy.types as sqltypes
import datetime,time
class SLNumeric(sqltypes.Numeric):
    """Numeric type for sqlite; emits NUMERIC with optional precision/scale."""

    def get_col_spec(self):
        # With no precision configured, fall back to a bare NUMERIC affinity.
        if self.precision is None:
            return "NUMERIC"
        # 'length' is this era's name for the scale.
        return "NUMERIC(%s, %s)" % (self.precision, self.length)
class SLInteger(sqltypes.Integer):
    """Integer type for sqlite."""

    def get_col_spec(self):
        # sqlite's integer affinity keyword.
        return "INTEGER"
class SLSmallInteger(sqltypes.Smallinteger):
    """Small-integer type for sqlite."""

    def get_col_spec(self):
        # Rendered as SMALLINT; sqlite treats it with integer affinity anyway.
        return "SMALLINT"
class DateTimeMixin(object):
    """Shared conversion helpers for the sqlite date/time types.

    sqlite stores temporal values as plain text; binding stringifies the
    value and result conversion parses the text back into a tuple of
    calendar fields.
    """

    def convert_bind_param(self, value, dialect):
        # None passes through as SQL NULL; everything else is stored as str().
        if value is None:
            return None
        return str(value)

    def _cvt(self, value, dialect, fmt):
        """Parse a stored string into (year, month, day, hour, minute,
        second, microsecond), or return None for a NULL value."""
        if value is None:
            return None
        try:
            # Split off a fractional-seconds part, if present.
            main, frac = value.split('.')
            frac = int(frac)
        except ValueError:
            # No '.' (or more than one): treat the whole string as the
            # main value with zero microseconds.
            main, frac = value, 0
        return time.strptime(main, fmt)[0:6] + (frac,)
class SLDateTime(DateTimeMixin, sqltypes.DateTime):
    """Datetime stored as text in a TIMESTAMP column."""

    def get_col_spec(self):
        return "TIMESTAMP"

    def convert_result_value(self, value, dialect):
        # _cvt yields None for NULL, otherwise a full 7-field tuple.
        parts = self._cvt(value, dialect, "%Y-%m-%d %H:%M:%S")
        if parts is None:
            return None
        return datetime.datetime(*parts)
class SLDate(DateTimeMixin, sqltypes.Date):
    """Date stored as text in a DATE column."""

    def get_col_spec(self):
        return "DATE"

    def convert_result_value(self, value, dialect):
        parts = self._cvt(value, dialect, "%Y-%m-%d")
        if parts is None:
            return None
        # Only the year/month/day fields are meaningful for a date.
        return datetime.date(parts[0], parts[1], parts[2])
class SLTime(DateTimeMixin, sqltypes.Time):
    """Time-of-day stored as text in a TIME column."""

    def get_col_spec(self):
        return "TIME"

    def convert_result_value(self, value, dialect):
        parts = self._cvt(value, dialect, "%H:%M:%S")
        if parts is None:
            return None
        # Fields 3..6 are hour, minute, second, microsecond.
        return datetime.time(*parts[3:7])
class SLText(sqltypes.TEXT):
    """Unbounded text type for sqlite."""

    def get_col_spec(self):
        return "TEXT"
class SLString(sqltypes.String):
    """Variable-length string type for sqlite."""

    def get_col_spec(self):
        # A String with no configured length used to render the invalid
        # spec "VARCHAR(None)"; sqlite accepts an unparameterized VARCHAR,
        # so omit the parens in that case.
        if self.length is None:
            return "VARCHAR"
        return "VARCHAR(%(length)s)" % {'length': self.length}
class SLChar(sqltypes.CHAR):
    """Fixed-length character type for sqlite."""

    def get_col_spec(self):
        # Mirror SLString: a CHAR with no length would otherwise render the
        # invalid spec "CHAR(None)"; emit a bare CHAR instead.
        if self.length is None:
            return "CHAR"
        return "CHAR(%(length)s)" % {'length': self.length}
class SLBinary(sqltypes.Binary):
    """Binary type for sqlite; stored in a BLOB column."""

    def get_col_spec(self):
        return "BLOB"
class SLBoolean(sqltypes.Boolean):
    """Boolean type; sqlite has no native boolean, so store 1/0 integers."""

    def get_col_spec(self):
        return "BOOLEAN"

    def convert_bind_param(self, value, dialect):
        # NULL passes through; any other value is coerced by truthiness.
        if value is None:
            return None
        return int(bool(value))

    def convert_result_value(self, value, dialect):
        # Reverse of the bind conversion: 1/0 back to True/False.
        if value is None:
            return None
        return bool(value)
# Map generic SQLAlchemy type classes to their sqlite-specific
# implementations; consumed by SQLiteDialect.type_descriptor() through
# sqltypes.adapt_type().  Float deliberately shares SLNumeric.
colspecs = {
    sqltypes.Integer : SLInteger,
    sqltypes.Smallinteger : SLSmallInteger,
    sqltypes.Numeric : SLNumeric,
    sqltypes.Float : SLNumeric,
    sqltypes.DateTime : SLDateTime,
    sqltypes.Date : SLDate,
    sqltypes.Time : SLTime,
    sqltypes.String : SLString,
    sqltypes.Binary : SLBinary,
    sqltypes.Boolean : SLBoolean,
    sqltypes.TEXT : SLText,
    sqltypes.CHAR: SLChar,
}
# Map upper-cased type names as reported by "PRAGMA table_info" to the
# sqlite type classes above; used during reflection
# (SQLiteDialect.reflecttable).  Unknown names fall back to SLString there.
pragma_names = {
    'INTEGER' : SLInteger,
    'SMALLINT' : SLSmallInteger,
    'VARCHAR' : SLString,
    'CHAR' : SLChar,
    'TEXT' : SLText,
    'NUMERIC' : SLNumeric,
    'FLOAT' : SLNumeric,
    'TIMESTAMP' : SLDateTime,
    'DATETIME' : SLDateTime,
    'DATE' : SLDate,
    'BLOB' : SLBinary,
}
def descriptor():
    """Describe this dialect for the engine-URL machinery: its name,
    a display description, and the connect arguments it accepts."""
    return {
        'name': 'sqlite',
        'description': 'SQLite',
        'arguments': [
            # (argument name, description, default)
            ('database', "Database Filename", None),
        ],
    }
class SQLiteExecutionContext(default.DefaultExecutionContext):
    """Execution context that captures the rowid generated by an INSERT."""

    def post_exec(self):
        # sqlite exposes the autogenerated primary key via cursor.lastrowid;
        # record it so last_inserted_ids() can report it.
        if self.compiled.isinsert:
            self._last_inserted_ids = [self.cursor.lastrowid]
        default.DefaultExecutionContext.post_exec(self)
class SQLiteDialect(ansisql.ANSIDialect):
    """SQLAlchemy dialect for SQLite.

    Locates a DB-API module (pysqlite2, the 2.5+ stdlib sqlite3, or the
    legacy 'sqlite'), compiles statements with qmark parameters, and
    reflects tables via the PRAGMA commands.
    """

    def __init__(self, **kwargs):
        # sqlite uses '?' positional bind parameters.
        ansisql.ANSIDialect.__init__(self, default_paramstyle='qmark', **kwargs)
        def vers(num):
            # "3.2.3" -> (3, 2, 3), so versions compare correctly as tuples.
            return tuple([int(x) for x in num.split('.')])
        # CAST was added to sqlite in 3.2.3; assume support when there is no
        # dbapi module to ask (e.g. pure SQL-string generation).
        self.supports_cast = (self.dbapi is None or vers(self.dbapi.sqlite_version) >= vers("3.2.3"))

    def dbapi(cls):
        """Import and return a sqlite DB-API module, trying pysqlite2,
        then the 2.5+ stdlib sqlite3, then the legacy 'sqlite' module;
        re-raises the original ImportError if none is available."""
        try:
            from pysqlite2 import dbapi2 as sqlite
        except ImportError, e:
            try:
                from sqlite3 import dbapi2 as sqlite #try the 2.5+ stdlib name.
            except ImportError:
                try:
                    sqlite = __import__('sqlite') # skip ourselves
                except ImportError:
                    raise e
        return sqlite
    # Pre-decorator-syntax classmethod registration.
    dbapi = classmethod(dbapi)

    def compiler(self, statement, bindparams, **kwargs):
        return SQLiteCompiler(self, statement, bindparams, **kwargs)

    def schemagenerator(self, *args, **kwargs):
        return SQLiteSchemaGenerator(self, *args, **kwargs)

    def schemadropper(self, *args, **kwargs):
        return SQLiteSchemaDropper(self, *args, **kwargs)

    def supports_alter(self):
        # sqlite's ALTER TABLE support is too limited to rely on here.
        return False

    def preparer(self):
        return SQLiteIdentifierPreparer(self)

    def create_connect_args(self, url):
        """Turn a URL into DB-API connect() args: the database filename
        (':memory:' when absent) plus any URL query options."""
        filename = url.database or ':memory:'
        return ([filename], url.query)

    def type_descriptor(self, typeobj):
        # Adapt a generic type to its sqlite implementation via colspecs.
        return sqltypes.adapt_type(typeobj, colspecs)

    def create_execution_context(self, **kwargs):
        return SQLiteExecutionContext(self, **kwargs)

    def supports_unicode_statements(self):
        return True

    def last_inserted_ids(self):
        return self.context.last_inserted_ids

    def oid_column_name(self, column):
        # sqlite exposes the implicit rowid under the name "oid".
        return "oid"

    def has_table(self, connection, table_name, schema=None):
        """Return True if the named table exists (PRAGMA table_info yields
        no rows for a missing table).

        NOTE(review): table_name is concatenated unescaped into the PRAGMA
        string; quoting/escaping is assumed to be unnecessary here — confirm.
        """
        cursor = connection.execute("PRAGMA table_info(" + table_name + ")", {})
        row = cursor.fetchone()
        # consume remaining rows, to work around: http://www.sqlite.org/cvstrac/tktview?tn=1884
        while cursor.fetchone() is not None:pass
        return (row is not None)

    def reflecttable(self, connection, table):
        """Populate ``table`` with columns, foreign keys and (partially)
        unique-index information read from the sqlite PRAGMAs.

        Raises NoSuchTableError when PRAGMA table_info returns no rows.
        """
        # --- columns: PRAGMA table_info rows are
        # (cid, name, type, notnull, dflt_value, pk) ---
        c = connection.execute("PRAGMA table_info(" + table.name + ")", {})
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            (name, type, nullable, has_default, primary_key) = (row[1], row[2].upper(), not row[3], row[4] is not None, row[5])
            # Strip surrounding double quotes from reflected identifiers.
            name = re.sub(r'^\"|\"$', '', name)
            # Split "VARCHAR(20)" into the base name and its "(...)" args.
            match = re.match(r'(\w+)(\(.*?\))?', type)
            if match:
                coltype = match.group(1)
                args = match.group(2)
            else:
                coltype = "VARCHAR"
                args = ''
            # Unknown type names fall back to a string type.
            coltype = pragma_names.get(coltype, SLString)
            if args is not None:
                # Instantiate the type with any numeric args, e.g. length.
                args = re.findall(r'(\d+)', args)
                coltype = coltype(*[int(a) for a in args])
            colargs= []
            if has_default:
                # The actual default expression is not reflected; mark it
                # with a placeholder PassiveDefault.
                colargs.append(PassiveDefault('?'))
            table.append_column(schema.Column(name, coltype, primary_key = primary_key, nullable = nullable, *colargs))
        if not found_table:
            raise exceptions.NoSuchTableError(table.name)
        # --- foreign keys: PRAGMA foreign_key_list rows are
        # (id, seq, table, from, to, ...) grouped by constraint id ---
        c = connection.execute("PRAGMA foreign_key_list(" + table.name + ")", {})
        fks = {}
        while True:
            row = c.fetchone()
            if row is None:
                break
            (constraint_name, tablename, localcol, remotecol) = (row[0], row[2], row[3], row[4])
            tablename = re.sub(r'^\"|\"$', '', tablename)
            localcol = re.sub(r'^\"|\"$', '', localcol)
            remotecol = re.sub(r'^\"|\"$', '', remotecol)
            try:
                fk = fks[constraint_name]
            except KeyError:
                # New constraint id: accumulate (constrained cols, refspecs).
                fk = ([],[])
                fks[constraint_name] = fk
            # look up the table based on the given table's engine, not 'self',
            # since it could be a ProxyEngine
            remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection)
            constrained_column = table.c[localcol].name
            refspec = ".".join([tablename, remotecol])
            if constrained_column not in fk[0]:
                fk[0].append(constrained_column)
            if refspec not in fk[1]:
                fk[1].append(refspec)
        for name, value in fks.iteritems():
            table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1]))
        # check for UNIQUE indexes
        c = connection.execute("PRAGMA index_list(" + table.name + ")", {})
        unique_indexes = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            # row[2] == 1 flags the index as UNIQUE; row[1] is its name.
            if (row[2] == 1):
                unique_indexes.append(row[1])
        # loop thru unique indexes for one that includes the primary key
        for idx in unique_indexes:
            c = connection.execute("PRAGMA index_info(" + idx + ")", {})
            cols = []
            while True:
                row = c.fetchone()
                if row is None:
                    break
                cols.append(row[2])
                # NOTE(review): 'cols' and 'col' are collected but never
                # used afterward — this unique-index handling appears
                # unfinished; confirm intended behavior.
                col = table.columns[row[2]]
class SQLiteCompiler(ansisql.ANSICompiler):
    """Statement compiler with sqlite-specific SQL generation quirks."""

    def visit_cast(self, cast):
        # sqlite >= 3.2.3 supports CAST natively; defer to the ANSI rendering.
        if self.dialect.supports_cast:
            super(SQLiteCompiler, self).visit_cast(cast)
            return
        # Otherwise render just the inner expression and carry the target
        # type through the result-set typemap.
        if len(self.select_stack):
            # not sure if we want to set the typemap here...
            self.typemap.setdefault("CAST", cast.type)
        self.strings[cast] = self.strings[cast.clause]

    def limit_clause(self, select):
        # sqlite needs a LIMIT whenever OFFSET is present; LIMIT -1 means
        # "no limit".
        clause = ""
        if select.limit is not None:
            clause += " \n LIMIT " + str(select.limit)
        if select.offset is None:
            clause += " OFFSET 0"
        else:
            if select.limit is None:
                clause += " \n LIMIT -1"
            clause += " OFFSET " + str(select.offset)
        return clause

    def for_update_clause(self, select):
        # sqlite has no "FOR UPDATE" AFAICT
        return ''

    def binary_operator_string(self, binary):
        # String concatenation is spelled '||' in sqlite, not '+'.
        if binary.operator == '+' and isinstance(binary.type, sqltypes.String):
            return '||'
        return ansisql.ANSICompiler.binary_operator_string(self, binary)
class SQLiteSchemaGenerator(ansisql.ANSISchemaGenerator):
    """DDL generator producing sqlite column specifications."""

    def get_column_specification(self, column, **kwargs):
        # "<quoted name> <type>" plus optional DEFAULT and NOT NULL clauses.
        spec = "%s %s" % (
            self.preparer.format_column(column),
            column.type.dialect_impl(self.dialect).get_col_spec(),
        )
        default = self.get_column_default_string(column)
        if default is not None:
            spec += " DEFAULT " + default
        if not column.nullable:
            spec += " NOT NULL"
        return spec

    # NOTE: a visit_primary_key_constraint override that emitted a UNIQUE
    # index for multi-column primary keys used to live here, for very old
    # sqlite versions without composite-PK support; it no longer seems to
    # be needed.
class SQLiteSchemaDropper(ansisql.ANSISchemaDropper):
    """DDL dropper; the ANSI default DROP behavior suffices for sqlite."""
    pass
class SQLiteIdentifierPreparer(ansisql.ANSIIdentifierPreparer):
    """Identifier quoting for sqlite; schema qualifiers are always omitted
    since sqlite has no schema namespace."""

    def __init__(self, dialect):
        ansisql.ANSIIdentifierPreparer.__init__(self, dialect, omit_schema=True)
# Module-level export consumed by the engine machinery.
dialect = SQLiteDialect
# NOTE(review): presumably SingletonThreadPool is used because sqlite
# connections are restricted to their creating thread — confirm.
dialect.poolclass = pool.SingletonThreadPool
|