path: root/lib/sqlalchemy/databases/postgres.py
author    Jason Kirtland <jek@discorporate.us>    2008-01-10 23:16:56 +0000
committer Jason Kirtland <jek@discorporate.us>    2008-01-10 23:16:56 +0000
commit    3e9df22546cb4c7af0ece290f4f57a377516f142 (patch)
tree      a02dee0b6513ee58d80da2f58577a0cc53e58c5c /lib/sqlalchemy/databases/postgres.py
parent    04ad3303dc84b220292752afe343e73e35707e00 (diff)
download  sqlalchemy-3e9df22546cb4c7af0ece290f4f57a377516f142.tar.gz
Include column name in length-less String warning (more [ticket:912])
Diffstat (limited to 'lib/sqlalchemy/databases/postgres.py')
-rw-r--r--    lib/sqlalchemy/databases/postgres.py    56
1 files changed, 28 insertions, 28 deletions
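
The substantive change in this diff is near the bottom: PGSchemaGenerator now passes the Column object itself, rather than True, as _for_ddl when resolving the column's type for CREATE TABLE, so the warning emitted for a String declared without a length can name the offending column. A minimal sketch of the behaviour this targets, assuming the SQLAlchemy 0.4-era API; the table, column, and connection URL below are illustrative only:

    from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine

    metadata = MetaData()
    users = Table('users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String))   # no length: DDL generation triggers the length-less String warning

    # Assumes a reachable PostgreSQL database. With this change, the warning
    # raised while emitting "CREATE TABLE users" should identify the 'name'
    # column rather than only the type.
    engine = create_engine('postgres://scott:tiger@localhost/test')
    metadata.create_all(engine)
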
diff --git a/lib/sqlalchemy/databases/postgres.py b/lib/sqlalchemy/databases/postgres.py
index 6d29430b9..623726980 100644
--- a/lib/sqlalchemy/databases/postgres.py
+++ b/lib/sqlalchemy/databases/postgres.py
@@ -14,8 +14,8 @@ option to the Index constructor::
PostgreSQL 8.2+ supports returning a result set from inserts and updates.
To use this pass the column/expression list to the postgres_returning
parameter when creating the queries::
-
- raises = tbl.update(empl.c.sales > 100, values=dict(salary=empl.c.salary * 1.1),
+
+ raises = tbl.update(empl.c.sales > 100, values=dict(salary=empl.c.salary * 1.1),
postgres_returning=[empl.c.id, empl.c.salary]).execute().fetchall()
"""
@@ -31,7 +31,7 @@ from sqlalchemy import types as sqltypes
class PGInet(sqltypes.TypeEngine):
def get_col_spec(self):
return "INET"
-
+
class PGMacAddr(sqltypes.TypeEngine):
def get_col_spec(self):
return "MACADDR"
@@ -56,7 +56,7 @@ class PGNumeric(sqltypes.Numeric):
else:
return value
return process
-
+
class PGFloat(sqltypes.Float):
def get_col_spec(self):
if not self.precision:
@@ -118,13 +118,13 @@ class PGArray(sqltypes.Concatenable, sqltypes.TypeEngine):
if isinstance(item_type, type):
item_type = item_type()
self.item_type = item_type
-
+
def dialect_impl(self, dialect, **kwargs):
impl = self.__class__.__new__(self.__class__)
impl.__dict__.update(self.__dict__)
impl.item_type = self.item_type.dialect_impl(dialect)
return impl
-
+
def bind_processor(self, dialect):
item_proc = self.item_type.bind_processor(dialect)
def process(value):
@@ -140,7 +140,7 @@ class PGArray(sqltypes.Concatenable, sqltypes.TypeEngine):
return item
return [convert_item(item) for item in value]
return process
-
+
def result_processor(self, dialect):
item_proc = self.item_type.result_processor(dialect)
def process(value):
@@ -242,15 +242,15 @@ class PGExecutionContext(default.DefaultExecutionContext):
m = SELECT_RE.match(statement)
return m and (not m.group(1) or (RETURNING_RE.search(statement)
and RETURNING_QUOTED_RE.match(statement)))
-
+
def returns_rows_compiled(self, compiled):
return isinstance(compiled.statement, expression.Selectable) or \
(
(compiled.isupdate or compiled.isinsert) and "postgres_returning" in compiled.statement.kwargs
)
-
+
def create_cursor(self):
- # executing a default or Sequence standalone creates an execution context without a statement.
+ # executing a default or Sequence standalone creates an execution context without a statement.
# so slightly hacky "if no statement assume we're server side" logic
# TODO: dont use regexp if Compiled is used ?
self.__is_server_side = \
@@ -272,7 +272,7 @@ class PGExecutionContext(default.DefaultExecutionContext):
return base.BufferedRowResultProxy(self)
else:
return base.ResultProxy(self)
-
+
def post_exec(self):
if self.compiled.isinsert and self.last_inserted_ids is None:
if not self.dialect.use_oids:
@@ -285,7 +285,7 @@ class PGExecutionContext(default.DefaultExecutionContext):
row = self.connection.execute(s).fetchone()
self._last_inserted_ids = [v for v in row]
super(PGExecutionContext, self).post_exec()
-
+
class PGDialect(default.DefaultDialect):
supports_alter = True
supports_unicode_statements = False
@@ -300,12 +300,12 @@ class PGDialect(default.DefaultDialect):
self.use_oids = use_oids
self.server_side_cursors = server_side_cursors
self.paramstyle = 'pyformat'
-
+
def dbapi(cls):
import psycopg2 as psycopg
return psycopg
dbapi = classmethod(dbapi)
-
+
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
if 'port' in opts:
@@ -390,10 +390,10 @@ class PGDialect(default.DefaultDialect):
return "losed the connection unexpectedly" in str(e)
else:
return False
-
+
def table_names(self, connection, schema):
s = """
- SELECT relname
+ SELECT relname
FROM pg_class c
WHERE relkind = 'r'
AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace)
@@ -417,7 +417,7 @@ class PGDialect(default.DefaultDialect):
else:
schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)"
schemaname = None
-
+
SQL_COLS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
@@ -447,11 +447,11 @@ class PGDialect(default.DefaultDialect):
raise exceptions.NoSuchTableError(table.name)
domains = self._load_domains(connection)
-
+
for name, format_type, default, notnull, attnum, table_oid in rows:
if include_columns and name not in include_columns:
continue
-
+
## strip (30) from character varying(30)
attype = re.search('([^\([]+)', format_type).group(1)
nullable = not notnull
@@ -563,7 +563,7 @@ class PGDialect(default.DefaultDialect):
if referred_schema:
referred_schema = preparer._unquote_identifier(referred_schema)
elif table.schema is not None and table.schema == self.get_default_schema_name(connection):
- # no schema (i.e. its the default schema), and the table we're
+ # no schema (i.e. its the default schema), and the table we're
# reflecting has the default schema explicit, then use that.
# i.e. try to use the user's conventions
referred_schema = table.schema
@@ -582,7 +582,7 @@ class PGDialect(default.DefaultDialect):
refspec.append(".".join([referred_table, column]))
table.append_constraint(schema.ForeignKeyConstraint(constrained_columns, refspec, conname))
-
+
def _load_domains(self, connection):
## Load data types for domains:
SQL_DOMAINS = """
@@ -606,7 +606,7 @@ class PGDialect(default.DefaultDialect):
## strip (30) from character varying(30)
attype = re.search('([^\(]+)', domain['attype']).group(1)
if domain['visible']:
- # 'visible' just means whether or not the domain is in a
+ # 'visible' just means whether or not the domain is in a
# schema that's on the search path -- or not overriden by
# a schema with higher presedence. If it's not visible,
# it will be prefixed with the schema-name when it's used.
@@ -617,9 +617,9 @@ class PGDialect(default.DefaultDialect):
domains[name] = {'attype':attype, 'nullable': domain['nullable'], 'default': domain['default']}
return domains
-
-
-
+
+
+
class PGCompiler(compiler.DefaultCompiler):
operators = compiler.DefaultCompiler.operators.copy()
operators.update(
@@ -633,7 +633,7 @@ class PGCompiler(compiler.DefaultCompiler):
return None
else:
return "nextval('%s')" % self.preparer.format_sequence(seq)
-
+
def limit_clause(self, select):
text = ""
if select._limit is not None:
@@ -699,7 +699,7 @@ class PGSchemaGenerator(compiler.SchemaGenerator):
else:
colspec += " SERIAL"
else:
- colspec += " " + column.type.dialect_impl(self.dialect, _for_ddl=True).get_col_spec()
+ colspec += " " + column.type.dialect_impl(self.dialect, _for_ddl=column).get_col_spec()
default = self.get_column_default_string(column)
if default is not None:
colspec += " DEFAULT " + default
@@ -712,7 +712,7 @@ class PGSchemaGenerator(compiler.SchemaGenerator):
if not sequence.optional and (not self.checkfirst or not self.dialect.has_sequence(self.connection, sequence.name)):
self.append("CREATE SEQUENCE %s" % self.preparer.format_sequence(sequence))
self.execute()
-
+
def visit_index(self, index):
preparer = self.preparer
self.append("CREATE ")