summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/dialects/sqlite/base.py
diff options
context:
space:
mode:
Diffstat (limited to 'lib/sqlalchemy/dialects/sqlite/base.py')
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py  178
1 files changed, 102 insertions, 76 deletions
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index c38baa738..6d2d0e89d 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -12,14 +12,15 @@
Date and Time Types
-------------------
-SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide
-out of the box functionality for translating values between Python `datetime` objects
-and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime`
-and related types provide date formatting and parsing functionality when SQlite is used.
-The implementation classes are :class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`.
-These types represent dates and times as ISO formatted strings, which also nicely
-support ordering. There's no reliance on typical "libc" internals for these functions
-so historical dates are fully supported.
+SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite
+does not provide out of the box functionality for translating values between
+Python `datetime` objects and a SQLite-supported format. SQLAlchemy's own
+:class:`~sqlalchemy.types.DateTime` and related types provide date formatting
+and parsing functionality when SQLite is used. The implementation classes are
+:class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`.
+These types represent dates and times as ISO formatted strings, which also
+nicely support ordering. There's no reliance on typical "libc" internals
+for these functions so historical dates are fully supported.
Auto Incrementing Behavior
--------------------------
@@ -46,44 +47,47 @@ to the Table construct::
Transaction Isolation Level
---------------------------
-:func:`.create_engine` accepts an ``isolation_level`` parameter which results in
-the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
-connection. Valid values for this parameter are ``SERIALIZABLE`` and
-``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
+:func:`.create_engine` accepts an ``isolation_level`` parameter which
+results in the command ``PRAGMA read_uncommitted <level>`` being invoked for
+every new connection. Valid values for this parameter are ``SERIALIZABLE``
+and ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
See the section :ref:`pysqlite_serializable` for an important workaround
when using serializable isolation with Pysqlite.
Database Locking Behavior / Concurrency
---------------------------------------
-Note that SQLite is not designed for a high level of concurrency. The database
-itself, being a file, is locked completely during write operations and within
-transactions, meaning exactly one connection has exclusive access to the database
-during this period - all other connections will be blocked during this time.
+Note that SQLite is not designed for a high level of concurrency. The
+database itself, being a file, is locked completely during write operations
+and within transactions, meaning exactly one connection has exclusive access
+to the database during this period - all other connections will be blocked
+during this time.
The Python DBAPI specification also calls for a connection model that is always
-in a transaction; there is no BEGIN method, only commit and rollback. This implies
-that a SQLite DBAPI driver would technically allow only serialized access to a
-particular database file at all times. The pysqlite driver attempts to ameliorate this by
-deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or
-DELETE) is received within a transaction. While this breaks serializable isolation,
-it at least delays the exclusive locking inherent in SQLite's design.
+in a transaction; there is no BEGIN method, only commit and rollback. This
+implies that a SQLite DBAPI driver would technically allow only serialized
+access to a particular database file at all times. The pysqlite driver
+attempts to ameliorate this by deferring the actual BEGIN statement until
+the first DML (INSERT, UPDATE, or DELETE) is received within a
+transaction. While this breaks serializable isolation, it at least delays
+the exclusive locking inherent in SQLite's design.
SQLAlchemy's default mode of usage with the ORM is known
-as "autocommit=False", which means the moment the :class:`.Session` begins to be
-used, a transaction is begun. As the :class:`.Session` is used, the autoflush
-feature, also on by default, will flush out pending changes to the database
-before each query. The effect of this is that a :class:`.Session` used in its
-default mode will often emit DML early on, long before the transaction is actually
-committed. This again will have the effect of serializing access to the SQLite
-database. If highly concurrent reads are desired against the SQLite database,
-it is advised that the autoflush feature be disabled, and potentially even
-that autocommit be re-enabled, which has the effect of each SQL statement and
-flush committing changes immediately.
+as "autocommit=False", which means the moment the :class:`.Session` begins to
+be used, a transaction is begun. As the :class:`.Session` is used, the
+autoflush feature, also on by default, will flush out pending changes to the
+database before each query. The effect of this is that a :class:`.Session`
+used in its default mode will often emit DML early on, long before the
+transaction is actually committed. This again will have the effect of
+serializing access to the SQLite database. If highly concurrent reads are
+desired against the SQLite database, it is advised that the autoflush feature
+be disabled, and potentially even that autocommit be re-enabled, which has
+the effect of each SQL statement and flush committing changes immediately.
For more information on SQLite's lack of concurrency by design, please
-see `Situations Where Another RDBMS May Work Better - High Concurrency <http://www.sqlite.org/whentouse.html>`_
-near the bottom of the page.
+see `Situations Where Another RDBMS May Work Better - High
+Concurrency <http://www.sqlite.org/whentouse.html>`_ near the bottom of
+the page.
.. _sqlite_foreign_keys:
@@ -123,7 +127,8 @@ for new connections through the usage of events::
"""
-import datetime, re
+import datetime
+import re
from sqlalchemy import sql, exc
from sqlalchemy.engine import default, base, reflection
@@ -135,6 +140,7 @@ from sqlalchemy import processors
from sqlalchemy.types import BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL,\
FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR
+
class _DateTimeMixin(object):
_reg = None
_storage_format = None
@@ -146,6 +152,7 @@ class _DateTimeMixin(object):
if storage_format is not None:
self._storage_format = storage_format
+
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"""Represent a Python datetime object in SQLite using a string.
@@ -164,9 +171,9 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
from sqlalchemy.dialects.sqlite import DATETIME
dt = DATETIME(
- storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
- regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)")
- )
+ storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
+ regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)")
+ )
:param storage_format: format string which will be applied to the
dict with keys year, month, day, hour, minute, second, and microsecond.
@@ -201,6 +208,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
datetime_datetime = datetime.datetime
datetime_date = datetime.date
format = self._storage_format
+
def process(value):
if value is None:
return None
@@ -236,6 +244,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
else:
return processors.str_to_datetime
+
class DATE(_DateTimeMixin, sqltypes.Date):
"""Represent a Python date object in SQLite using a string.
@@ -274,6 +283,7 @@ class DATE(_DateTimeMixin, sqltypes.Date):
def bind_processor(self, dialect):
datetime_date = datetime.date
format = self._storage_format
+
def process(value):
if value is None:
return None
@@ -295,6 +305,7 @@ class DATE(_DateTimeMixin, sqltypes.Date):
else:
return processors.str_to_date
+
class TIME(_DateTimeMixin, sqltypes.Time):
"""Represent a Python time object in SQLite using a string.
@@ -313,9 +324,9 @@ class TIME(_DateTimeMixin, sqltypes.Time):
from sqlalchemy.dialects.sqlite import TIME
t = TIME(
- storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d",
- regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
- )
+ storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d",
+ regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
+ )
:param storage_format: format string which will be applied to the
dict with keys hour, minute, second, and microsecond.
@@ -343,6 +354,7 @@ class TIME(_DateTimeMixin, sqltypes.Time):
def bind_processor(self, dialect):
datetime_time = datetime.time
format = self._storage_format
+
def process(value):
if value is None:
return None
@@ -394,7 +406,6 @@ ischema_names = {
}
-
class SQLiteCompiler(compiler.SQLCompiler):
extract_map = util.update_copy(
compiler.SQLCompiler.extract_map,
@@ -435,7 +446,9 @@ class SQLiteCompiler(compiler.SQLCompiler):
def visit_extract(self, extract, **kw):
try:
return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
- self.extract_map[extract.field], self.process(extract.expr, **kw))
+ self.extract_map[extract.field],
+ self.process(extract.expr, **kw)
+ )
except KeyError:
raise exc.CompileError(
"%s is not a valid extract argument." % extract.field)
@@ -443,7 +456,7 @@ class SQLiteCompiler(compiler.SQLCompiler):
def limit_clause(self, select):
text = ""
if select._limit is not None:
- text += "\n LIMIT " + self.process(sql.literal(select._limit))
+ text += "\n LIMIT " + self.process(sql.literal(select._limit))
if select._offset is not None:
if select._limit is None:
text += "\n LIMIT " + self.process(sql.literal(-1))
@@ -460,7 +473,8 @@ class SQLiteCompiler(compiler.SQLCompiler):
class SQLiteDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column) + " " + self.dialect.type_compiler.process(column.type)
+ coltype = self.dialect.type_compiler.process(column.type)
+ colspec = self.preparer.format_column(column) + " " + coltype
default = self.get_column_default_string(column)
if default is not None:
colspec += " DEFAULT " + default
@@ -468,12 +482,12 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
if not column.nullable:
colspec += " NOT NULL"
- if column.primary_key and \
- column.table.kwargs.get('sqlite_autoincrement', False) and \
- len(column.table.primary_key.columns) == 1 and \
- issubclass(column.type._type_affinity, sqltypes.Integer) and \
- not column.foreign_keys:
- colspec += " PRIMARY KEY AUTOINCREMENT"
+ if (column.primary_key and
+ column.table.kwargs.get('sqlite_autoincrement', False) and
+ len(column.table.primary_key.columns) == 1 and
+ issubclass(column.type._type_affinity, sqltypes.Integer) and
+ not column.foreign_keys):
+ colspec += " PRIMARY KEY AUTOINCREMENT"
return colspec
@@ -521,10 +535,12 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
for c in index.columns))
return text
+
class SQLiteTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
+
class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([
'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
@@ -536,14 +552,15 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive',
'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob',
'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index',
- 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
- 'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural',
- 'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer',
- 'plan', 'pragma', 'primary', 'query', 'raise', 'references',
- 'reindex', 'rename', 'replace', 'restrict', 'right', 'rollback',
- 'row', 'select', 'set', 'table', 'temp', 'temporary', 'then', 'to',
- 'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using',
- 'vacuum', 'values', 'view', 'virtual', 'when', 'where',
+ 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect',
+ 'into', 'is', 'isnull', 'join', 'key', 'left', 'like', 'limit',
+ 'match', 'natural', 'not', 'notnull', 'null', 'of', 'offset', 'on',
+ 'or', 'order', 'outer', 'plan', 'pragma', 'primary', 'query',
+ 'raise', 'references', 'reindex', 'rename', 'replace', 'restrict',
+ 'right', 'rollback', 'row', 'select', 'set', 'table', 'temp',
+ 'temporary', 'then', 'to', 'transaction', 'trigger', 'true', 'union',
+ 'unique', 'update', 'using', 'vacuum', 'values', 'view', 'virtual',
+ 'when', 'where',
])
def format_index(self, index, use_schema=True, name=None):
@@ -552,10 +569,14 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
if name is None:
name = index.name
result = self.quote(name, index.quote)
- if not self.omit_schema and use_schema and getattr(index.table, "schema", None):
- result = self.quote_schema(index.table.schema, index.table.quote_schema) + "." + result
+ if (not self.omit_schema and
+ use_schema and
+ getattr(index.table, "schema", None)):
+ result = self.quote_schema(
+ index.table.schema, index.table.quote_schema) + "." + result
return result
+
class SQLiteExecutionContext(default.DefaultExecutionContext):
@util.memoized_property
def _preserve_raw_colnames(self):
@@ -611,9 +632,10 @@ class SQLiteDialect(default.DefaultDialect):
self.dbapi.sqlite_version_info >= (3, 2, 3)
_isolation_lookup = {
- 'READ UNCOMMITTED':1,
- 'SERIALIZABLE':0
+ 'READ UNCOMMITTED': 1,
+ 'SERIALIZABLE': 0
}
+
def set_isolation_level(self, connection, level):
try:
isolation_level = self._isolation_lookup[level.replace('_', ' ')]
@@ -686,7 +708,8 @@ class SQLiteDialect(default.DefaultDialect):
else:
pragma = "PRAGMA "
qtable = quote(table_name)
- cursor = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))
+ statement = "%stable_info(%s)" % (pragma, qtable)
+ cursor = _pragma_cursor(connection.execute(statement))
row = cursor.fetchone()
# consume remaining rows, to work around
@@ -752,9 +775,8 @@ class SQLiteDialect(default.DefaultDialect):
else:
pragma = "PRAGMA "
qtable = quote(table_name)
- c = _pragma_cursor(
- connection.execute("%stable_info(%s)" %
- (pragma, qtable)))
+ statement = "%stable_info(%s)" % (pragma, qtable)
+ c = _pragma_cursor(connection.execute(statement))
rows = c.fetchall()
columns = []
@@ -806,7 +828,7 @@ class SQLiteDialect(default.DefaultDialect):
for col in cols:
if col['primary_key']:
pkeys.append(col['name'])
- return {'constrained_columns':pkeys, 'name':None}
+ return {'constrained_columns': pkeys, 'name': None}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
@@ -816,7 +838,8 @@ class SQLiteDialect(default.DefaultDialect):
else:
pragma = "PRAGMA "
qtable = quote(table_name)
- c = _pragma_cursor(connection.execute("%sforeign_key_list(%s)" % (pragma, qtable)))
+ statement = "%sforeign_key_list(%s)" % (pragma, qtable)
+ c = _pragma_cursor(connection.execute(statement))
fkeys = []
fks = {}
while True:
@@ -839,10 +862,10 @@ class SQLiteDialect(default.DefaultDialect):
except KeyError:
fk = {
'name': None,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : rtbl,
- 'referred_columns' : []
+ 'constrained_columns': [],
+ 'referred_schema': None,
+ 'referred_table': rtbl,
+ 'referred_columns': []
}
fkeys.append(fk)
fks[numerical_id] = fk
@@ -864,7 +887,8 @@ class SQLiteDialect(default.DefaultDialect):
pragma = "PRAGMA "
include_auto_indexes = kw.pop('include_auto_indexes', False)
qtable = quote(table_name)
- c = _pragma_cursor(connection.execute("%sindex_list(%s)" % (pragma, qtable)))
+ statement = "%sindex_list(%s)" % (pragma, qtable)
+ c = _pragma_cursor(connection.execute(statement))
indexes = []
while True:
row = c.fetchone()
@@ -872,13 +896,15 @@ class SQLiteDialect(default.DefaultDialect):
break
# ignore implicit primary key index.
# http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html
- elif not include_auto_indexes and row[1].startswith('sqlite_autoindex'):
+ elif (not include_auto_indexes and
+ row[1].startswith('sqlite_autoindex')):
continue
indexes.append(dict(name=row[1], column_names=[], unique=row[2]))
# loop thru unique indexes to get the column names.
for idx in indexes:
- c = connection.execute("%sindex_info(%s)" % (pragma, quote(idx['name'])))
+ statement = "%sindex_info(%s)" % (pragma, quote(idx['name']))
+ c = connection.execute(statement)
cols = idx['column_names']
while True:
row = c.fetchone()