From 9e621d18aa84bf20640283e1e7a4abd17af47df9 Mon Sep 17 00:00:00 2001 From: Jack Zhou Date: Tue, 29 Jul 2014 11:49:52 -0700 Subject: Added documentation about interaction between `subqueryload` and LIMIT/OFFSET. --- doc/build/faq.rst | 84 ++++++++++++++++++++++++++++++++++++++++++++++ doc/build/orm/loading.rst | 15 +++++++++ doc/build/orm/tutorial.rst | 6 ++++ 3 files changed, 105 insertions(+) diff --git a/doc/build/faq.rst b/doc/build/faq.rst index 0c8314cb5..d642d1de8 100644 --- a/doc/build/faq.rst +++ b/doc/build/faq.rst @@ -622,6 +622,90 @@ The same idea applies to all the other arguments, such as ``foreign_keys``:: foo = relationship(Dest, foreign_keys=[foo_id, bar_id]) +.. _faq_subqueryload_sort: + +Why must I always ``ORDER BY`` a unique column when using ``subqueryload``? +---------------------------------------------------------------------------- + +The SQL standard prescribes that RDBMSs are free to return rows in any order it +deems appropriate, if no ``ORDER BY`` clause is specified. This even extends to +the case where the ``ORDER BY`` clause is not unique across all rows, i.e. rows +with the same value in the ``ORDER BY`` column(s) will not necessarily be +returned in a deterministic order. + +SQLAlchemy implements :func:`.orm.subqueryload` by issuing a separate query +(where the table specified in the relationship is joined to the original query) +and then attempting to match up the results in Python. This works fine +normally: + +.. 
sourcecode:: python+sql + + >>> session.query(User).options(subqueryload(User.addresses)).all() + {opensql}# the "main" query + SELECT users.id AS users_id + FROM users + {stop} + {opensql}# the "load" query issued by subqueryload + SELECT addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id + FROM users) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id + +Notice how the main query is a subquery in the load query. When an +``OFFSET``/``LIMIT`` is involved, however, things get a bit tricky: + +.. sourcecode:: python+sql + + >>> user = session.query(User).options(subqueryload(User.addresses)).first() + {opensql}# the "main" query + SELECT users.id AS users_id + FROM users + LIMIT 1 + {stop} + {opensql}# the "load" query issued by subqueryload + SELECT addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id + FROM users + LIMIT 1) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id + +The main query is still a subquery in the load query, but *it may return a +different set of results in the second query from the first* because it does +not have a deterministic sort order! Depending on database internals, there is +a chance we may get the following resultset for the two queries:: + + +--------+ + |users_id| + +--------+ + | 1| + +--------+ + + +------------+-----------------+---------------+ + |addresses_id|addresses_user_id|anon_1_users_id| + +------------+-----------------+---------------+ + | 3| 2| 2| + +------------+-----------------+---------------+ + | 4| 2| 2| + +------------+-----------------+---------------+ + +From SQLAlchemy's point of view, it didn't get any addresses back for user 1, +so ``user.addresses`` is empty. Oops. 
+ +The solution to this problem is to always specify a deterministic sort order, +so that the main query always returns the same set of rows. This generally +means that you should :meth:`.Query.order_by` on a unique column on the table, +usually the primary key:: + + session.query(User).options(subqueryload(User.addresses)).order_by(User.id).first() + +You can get away with not doing a sort if the ``OFFSET``/``LIMIT`` does not +throw away any rows at all, but it's much simpler to remember to always ``ORDER +BY`` the primary key:: + + session.query(User).options(subqueryload(User.addresses)).filter(User.id == 1).first() + +Note that :func:`.joinedload` does not suffer from the same problem because +only one query is ever issued, so the load query cannot be different from the +main query. + Performance =========== diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst index 6c2fac004..27846b9b2 100644 --- a/doc/build/orm/loading.rst +++ b/doc/build/orm/loading.rst @@ -120,6 +120,21 @@ query options: # set children to load eagerly with a second statement session.query(Parent).options(subqueryload('children')).all() +.. _subquery_loading_tips: + +Subquery Loading Tips +^^^^^^^^^^^^^^^^^^^^^ + +If you have ``LIMIT`` or ``OFFSET`` in your query, you **must** ``ORDER BY`` a +unique column, generally the primary key of your table, in order to ensure +correct results (see :ref:`faq_subqueryload_sort`):: + + # incorrect + session.query(User).options(subqueryload(User.addresses)).order_by(User.name).first() + + # correct + session.query(User).options(subqueryload(User.addresses)).order_by(User.name, User.id).first() + Loading Along Paths ------------------- diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst index f90dc48d2..e75eda1ee 100644 --- a/doc/build/orm/tutorial.rst +++ b/doc/build/orm/tutorial.rst @@ -1703,6 +1703,12 @@ very easy to use: >>> jack.addresses [, ] +.. 
warning:: + + If you use :func:`.subqueryload`, you should generally + :meth:`.Query.order_by` on a unique column in order to ensure correct + results. See :ref:`subquery_loading_tips`. + Joined Load ------------- -- cgit v1.2.1 From 649f06759d933f4aacdfbb302e845e2bcb5e7641 Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Thu, 14 Aug 2014 14:47:23 -0400 Subject: Added support for postgres_relkind. --- lib/sqlalchemy/dialects/postgresql/base.py | 44 +++++++++---- lib/sqlalchemy/engine/reflection.py | 4 +- setup.cfg | 7 ++ test/dialect/postgresql/test_reflection.py | 102 ++++++++++++++++++++++++++++- test/requirements.py | 8 +++ 5 files changed, 149 insertions(+), 16 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 5ff2f7c61..b3506f5d2 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1669,11 +1669,12 @@ class PGDialect(default.DefaultDialect): "ops": {} }), (schema.Table, { - "ignore_search_path": False + "ignore_search_path": False, + "relkind": None }) ] - reflection_options = ('postgresql_ignore_search_path', ) + reflection_options = ('postgresql_ignore_search_path', 'postgresql_relkind') _backslash_escapes = True @@ -1898,7 +1899,7 @@ class PGDialect(default.DefaultDialect): return tuple([int(x) for x in m.group(1, 2, 3) if x is not None]) @reflection.cache - def get_table_oid(self, connection, table_name, schema=None, **kw): + def get_table_oid(self, connection, table_name, schema=None, postgresql_relkind=None, **kw): """Fetch the oid for schema.table_name. Several reflection methods require the table oid. 
The idea for using @@ -1911,13 +1912,28 @@ class PGDialect(default.DefaultDialect): schema_where_clause = "n.nspname = :schema" else: schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" + + RELKIND_SYNONYMS = { + 'materialized': 'm', + 'foreign': 'f' + } + ACCEPTED_RELKINDS = ('r','v','m','f') + if postgresql_relkind is None: + postgresql_relkind = 'r' + else: + postgresql_relkind = postgresql_relkind.lower() + if postgresql_relkind in RELKIND_SYNONYMS: + postgresql_relkind = RELKIND_SYNONYMS[postgresql_relkind.lower()] + if postgresql_relkind not in ACCEPTED_RELKINDS: + raise exc.SQLAlchemyError('Invalid postgresql_relkind: %s' % postgresql_relkind) + query = """ SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) - AND c.relname = :table_name AND c.relkind in ('r','v') - """ % schema_where_clause + AND c.relname = :table_name AND c.relkind in ('%s', 'v') + """ % (schema_where_clause, postgresql_relkind) # Since we're binding to unicode, table_name and schema_name must be # unicode. 
table_name = util.text_type(table_name) @@ -2014,7 +2030,8 @@ class PGDialect(default.DefaultDialect): def get_columns(self, connection, table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache')) + info_cache=kw.get('info_cache'), + postgresql_relkind=kw.get('postgresql_relkind')) SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), @@ -2164,7 +2181,8 @@ class PGDialect(default.DefaultDialect): @reflection.cache def get_pk_constraint(self, connection, table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache')) + info_cache=kw.get('info_cache'), + postgresql_relkind=kw.get('postgresql_relkind')) if self.server_version_info < (8, 4): PK_SQL = """ @@ -2214,7 +2232,8 @@ class PGDialect(default.DefaultDialect): postgresql_ignore_search_path=False, **kw): preparer = self.identifier_preparer table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache')) + info_cache=kw.get('info_cache'), + postgresql_relkind=kw.get('postgresql_relkind')) FK_SQL = """ SELECT r.conname, @@ -2318,11 +2337,11 @@ class PGDialect(default.DefaultDialect): @reflection.cache def get_indexes(self, connection, table_name, schema, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache')) + info_cache=kw.get('info_cache'), + postgresql_relkind=kw.get('postgresql_relkind')) # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql - IDX_SQL = """ SELECT i.relname as relname, @@ -2336,7 +2355,7 @@ class PGDialect(default.DefaultDialect): pg_attribute a on t.oid=a.attrelid and %s WHERE - t.relkind = 'r' + t.relkind IN ('r', 'v', 'f', 'm') and t.oid = :table_oid and ix.indisprimary = 'f' ORDER BY @@ -2391,7 +2410,8 @@ class PGDialect(default.DefaultDialect): def get_unique_constraints(self, connection, 
table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache')) + info_cache=kw.get('info_cache'), + postgresql_relkind=kw.get('postgresql_relkind')) UNIQUE_SQL = """ SELECT diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 012d1d35d..afe9a8b3e 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -378,7 +378,6 @@ class Inspector(object): use :class:`.quoted_name`. """ - return self.dialect.get_indexes(self.bind, table_name, schema, info_cache=self.info_cache, **kw) @@ -405,7 +404,6 @@ class Inspector(object): .. versionadded:: 0.8.4 """ - return self.dialect.get_unique_constraints( self.bind, table_name, schema, info_cache=self.info_cache, **kw) @@ -573,7 +571,7 @@ class Inspector(object): conname, link_to_name=True, **options)) # Indexes - indexes = self.get_indexes(table_name, schema) + indexes = self.get_indexes(table_name, schema, **table.dialect_kwargs) for index_d in indexes: name = index_d['name'] columns = index_d['column_names'] diff --git a/setup.cfg b/setup.cfg index 7517220a6..4ec4b0837 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,6 +26,13 @@ profile_file=test/profiles.txt # create database link test_link connect to scott identified by tiger using 'xe'; oracle_db_link = test_link +# host name of a postgres database that has the postgres_fdw extension. 
+# to create this run: +# CREATE EXTENSION postgres_fdw; +# GRANT USAGE ON FOREIGN DATA WRAPPER postgres_fdw TO public; +# this can be localhost to create a loopback foreign table +postgres_test_db_link = localhost + [db] default=sqlite:///:memory: diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 1d6a41765..313be0b37 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -12,8 +12,108 @@ import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql -class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): +class RelKindReflectionTest(fixtures.TestBase, AssertsExecutionResults): + """Test postgresql_relkind reflection option""" + + __requires__ = 'postgresql_test_dblink', + __only_on__ = 'postgresql >= 9.3' + __backend__ = True + + @classmethod + def setup_class(cls): + from sqlalchemy.testing import config + cls.dblink = config.file_config.get('sqla_testing', 'postgres_test_db_link') + + metadata = MetaData(testing.db) + testtable = Table( + 'testtable', metadata, + Column( + 'id', Integer, primary_key=True), + Column( + 'data', String(30))) + metadata.create_all() + testtable.insert().execute({'id': 89, 'data': 'd1'}) + + con = testing.db.connect() + for ddl in \ + "CREATE MATERIALIZED VIEW test_mview AS SELECT * FROM testtable;", \ + "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw OPTIONS (dbname 'test', host '%s');" % cls.dblink, \ + "CREATE USER MAPPING FOR public SERVER test_server options (user 'scott', password 'tiger');", \ + "CREATE FOREIGN TABLE test_foreigntable ( \ + id INT, \ + data VARCHAR(30) \ + ) SERVER test_server OPTIONS (table_name 'testtable');": + try: + con.execute(ddl) + except exc.DBAPIError as e: + if 'already exists' not in str(e): + raise e + + @classmethod + def teardown_class(cls): + con = testing.db.connect() + con.execute('DROP FOREIGN TABLE test_foreigntable;') + 
con.execute('DROP USER MAPPING FOR public SERVER test_server;') + con.execute('DROP SERVER test_server;') + con.execute('DROP MATERIALIZED VIEW test_mview;') + con.execute('DROP TABLE testtable;') + + def test_mview_is_reflected(self): + mview_relkind_names = ('m', 'materialized') + for mview_relkind_name in mview_relkind_names: + metadata = MetaData(testing.db) + table = Table('test_mview', metadata, autoload=True, postgresql_relkind=mview_relkind_name) + eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected mview didn't equal expected columns") + + def test_mview_select(self): + metadata = MetaData(testing.db) + table = Table('test_mview', metadata, autoload=True, postgresql_relkind='m') + assert table.select().execute().fetchall() == [ + (89, 'd1',) + ] + + def test_foreign_table_is_reflected(self): + foreign_table_relkind_names = ('f', 'foreign') + for foreign_table_relkind_name in foreign_table_relkind_names: + metadata = MetaData(testing.db) + table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind=foreign_table_relkind_name) + eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected foreign table didn't equal expected columns") + + def test_foreign_table_select(self): + metadata = MetaData(testing.db) + table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='f') + assert table.select().execute().fetchall() == [ + (89, 'd1',) + ] + + def test_foreign_table_roundtrip(self): + metadata = MetaData(testing.db) + table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='f') + + connection = testing.db.connect() + trans = connection.begin() + try: + table.delete().execute() + table.insert().execute({'id': 89, 'data': 'd1'}) + trans.commit() + except: + trans.rollback() + raise + + assert table.select().execute().fetchall() == [ + (89, 'd1',) + ] + + def test_invalid_relkind(self): + metadata = MetaData(testing.db) + def create_bad_table(): + return 
Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='nope') + + assert_raises(exc.SQLAlchemyError, create_bad_table) + + +class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): """Test PostgreSQL domains""" __only_on__ = 'postgresql > 8.3' diff --git a/test/requirements.py b/test/requirements.py index e8705d145..927c94bfb 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -716,6 +716,14 @@ class DefaultRequirements(SuiteRequirements): "oracle_db_link option not specified in config" ) + @property + def postgresql_test_dblink(self): + return skip_if( + lambda config: not config.file_config.has_option( + 'sqla_testing', 'postgres_test_db_link'), + "postgres_test_db_link option not specified in config" + ) + @property def percent_schema_names(self): return skip_if( -- cgit v1.2.1 From 4b26bf2e6f254d5cfc0998a9399a310c0f59944b Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Thu, 14 Aug 2014 17:19:10 -0400 Subject: Add a view synonym too for consistency. 
--- lib/sqlalchemy/dialects/postgresql/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 19d2c7ca4..893b4b3c1 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1953,7 +1953,8 @@ class PGDialect(default.DefaultDialect): RELKIND_SYNONYMS = { 'materialized': 'm', - 'foreign': 'f' + 'foreign': 'f', + 'view': 'v' } ACCEPTED_RELKINDS = ('r','v','m','f') if postgresql_relkind is None: -- cgit v1.2.1 From 2f7dce1d6fa43e88f64c81b6e612fbc42235fddd Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Tue, 26 Aug 2014 12:53:34 -0400 Subject: Removed all mentions to postgresql_relkind --- lib/sqlalchemy/dialects/postgresql/base.py | 38 +++++++----------------------- test/dialect/postgresql/test_reflection.py | 33 +++++++++----------------- 2 files changed, 20 insertions(+), 51 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 893b4b3c1..155136c1d 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1712,7 +1712,7 @@ class PGDialect(default.DefaultDialect): }) ] - reflection_options = ('postgresql_ignore_search_path', 'postgresql_relkind') + reflection_options = ('postgresql_ignore_search_path',) _backslash_escapes = True @@ -1937,7 +1937,7 @@ class PGDialect(default.DefaultDialect): return tuple([int(x) for x in m.group(1, 2, 3) if x is not None]) @reflection.cache - def get_table_oid(self, connection, table_name, schema=None, postgresql_relkind=None, **kw): + def get_table_oid(self, connection, table_name, schema=None, **kw): """Fetch the oid for schema.table_name. Several reflection methods require the table oid. 
The idea for using @@ -1951,28 +1951,13 @@ class PGDialect(default.DefaultDialect): else: schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" - RELKIND_SYNONYMS = { - 'materialized': 'm', - 'foreign': 'f', - 'view': 'v' - } - ACCEPTED_RELKINDS = ('r','v','m','f') - if postgresql_relkind is None: - postgresql_relkind = 'r' - else: - postgresql_relkind = postgresql_relkind.lower() - if postgresql_relkind in RELKIND_SYNONYMS: - postgresql_relkind = RELKIND_SYNONYMS[postgresql_relkind.lower()] - if postgresql_relkind not in ACCEPTED_RELKINDS: - raise exc.SQLAlchemyError('Invalid postgresql_relkind: %s' % postgresql_relkind) - query = """ SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) - AND c.relname = :table_name AND c.relkind in ('%s', 'v') - """ % (schema_where_clause, postgresql_relkind) + AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f') + """ % schema_where_clause # Since we're binding to unicode, table_name and schema_name must be # unicode. 
table_name = util.text_type(table_name) @@ -2069,8 +2054,7 @@ class PGDialect(default.DefaultDialect): def get_columns(self, connection, table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache'), - postgresql_relkind=kw.get('postgresql_relkind')) + info_cache=kw.get('info_cache')) SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), @@ -2224,8 +2208,7 @@ class PGDialect(default.DefaultDialect): @reflection.cache def get_pk_constraint(self, connection, table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache'), - postgresql_relkind=kw.get('postgresql_relkind')) + info_cache=kw.get('info_cache')) if self.server_version_info < (8, 4): PK_SQL = """ @@ -2275,8 +2258,7 @@ class PGDialect(default.DefaultDialect): postgresql_ignore_search_path=False, **kw): preparer = self.identifier_preparer table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache'), - postgresql_relkind=kw.get('postgresql_relkind')) + info_cache=kw.get('info_cache')) FK_SQL = """ SELECT r.conname, @@ -2380,8 +2362,7 @@ class PGDialect(default.DefaultDialect): @reflection.cache def get_indexes(self, connection, table_name, schema, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache'), - postgresql_relkind=kw.get('postgresql_relkind')) + info_cache=kw.get('info_cache')) # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql @@ -2453,8 +2434,7 @@ class PGDialect(default.DefaultDialect): def get_unique_constraints(self, connection, table_name, schema=None, **kw): table_oid = self.get_table_oid(connection, table_name, schema, - info_cache=kw.get('info_cache'), - postgresql_relkind=kw.get('postgresql_relkind')) + info_cache=kw.get('info_cache')) UNIQUE_SQL = """ SELECT diff --git 
a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 678c35881..405ac5921 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -13,8 +13,8 @@ import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql -class RelKindReflectionTest(fixtures.TestBase, AssertsExecutionResults): - """Test postgresql_relkind reflection option""" +class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): + """Test reflection on materialized views and foreign tables""" __requires__ = 'postgresql_test_dblink', __only_on__ = 'postgresql >= 9.3' @@ -61,36 +61,32 @@ class RelKindReflectionTest(fixtures.TestBase, AssertsExecutionResults): con.execute('DROP TABLE testtable;') def test_mview_is_reflected(self): - mview_relkind_names = ('m', 'materialized') - for mview_relkind_name in mview_relkind_names: - metadata = MetaData(testing.db) - table = Table('test_mview', metadata, autoload=True, postgresql_relkind=mview_relkind_name) - eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected mview didn't equal expected columns") + metadata = MetaData(testing.db) + table = Table('test_mview', metadata, autoload=True) + eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected mview didn't equal expected columns") def test_mview_select(self): metadata = MetaData(testing.db) - table = Table('test_mview', metadata, autoload=True, postgresql_relkind='m') + table = Table('test_mview', metadata, autoload=True) assert table.select().execute().fetchall() == [ (89, 'd1',) ] def test_foreign_table_is_reflected(self): - foreign_table_relkind_names = ('f', 'foreign') - for foreign_table_relkind_name in foreign_table_relkind_names: - metadata = MetaData(testing.db) - table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind=foreign_table_relkind_name) - eq_(set(table.columns.keys()), set(['id', 'data']), "Columns 
of reflected foreign table didn't equal expected columns") + metadata = MetaData(testing.db) + table = Table('test_foreigntable', metadata, autoload=True) + eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected foreign table didn't equal expected columns") def test_foreign_table_select(self): metadata = MetaData(testing.db) - table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='f') + table = Table('test_foreigntable', metadata, autoload=True) assert table.select().execute().fetchall() == [ (89, 'd1',) ] def test_foreign_table_roundtrip(self): metadata = MetaData(testing.db) - table = Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='f') + table = Table('test_foreigntable', metadata, autoload=True) connection = testing.db.connect() trans = connection.begin() @@ -106,13 +102,6 @@ class RelKindReflectionTest(fixtures.TestBase, AssertsExecutionResults): (89, 'd1',) ] - def test_invalid_relkind(self): - metadata = MetaData(testing.db) - def create_bad_table(): - return Table('test_foreigntable', metadata, autoload=True, postgresql_relkind='nope') - - assert_raises(exc.SQLAlchemyError, create_bad_table) - class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): """Test PostgreSQL domains""" -- cgit v1.2.1 From bcf7a55da01633c4890502463a08cb96af9fe5e9 Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Tue, 26 Aug 2014 12:56:54 -0400 Subject: Remove relkind from construct arguments. 
--- lib/sqlalchemy/dialects/postgresql/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 155136c1d..75d0696ad 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1707,8 +1707,7 @@ class PGDialect(default.DefaultDialect): "ops": {} }), (schema.Table, { - "ignore_search_path": False, - "relkind": None + "ignore_search_path": False }) ] -- cgit v1.2.1 From d39be884321d0afbae7ef3da556382b53fef8060 Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Tue, 26 Aug 2014 13:02:19 -0400 Subject: Removed changes that are no longer necessary for postgresql_relkind. Also, removed newline changes. --- lib/sqlalchemy/dialects/postgresql/base.py | 4 ++-- lib/sqlalchemy/engine/reflection.py | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 206a25d28..40b2f60ae 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1781,7 +1781,7 @@ class PGDialect(default.DefaultDialect): }) ] - reflection_options = ('postgresql_ignore_search_path',) + reflection_options = ('postgresql_ignore_search_path', ) _backslash_escapes = True @@ -2019,7 +2019,6 @@ class PGDialect(default.DefaultDialect): schema_where_clause = "n.nspname = :schema" else: schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" - query = """ SELECT c.oid FROM pg_catalog.pg_class c @@ -2435,6 +2434,7 @@ class PGDialect(default.DefaultDialect): # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql + IDX_SQL = """ SELECT i.relname as relname, diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index afe9a8b3e..012d1d35d 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ 
b/lib/sqlalchemy/engine/reflection.py @@ -378,6 +378,7 @@ class Inspector(object): use :class:`.quoted_name`. """ + return self.dialect.get_indexes(self.bind, table_name, schema, info_cache=self.info_cache, **kw) @@ -404,6 +405,7 @@ class Inspector(object): .. versionadded:: 0.8.4 """ + return self.dialect.get_unique_constraints( self.bind, table_name, schema, info_cache=self.info_cache, **kw) @@ -571,7 +573,7 @@ class Inspector(object): conname, link_to_name=True, **options)) # Indexes - indexes = self.get_indexes(table_name, schema, **table.dialect_kwargs) + indexes = self.get_indexes(table_name, schema) for index_d in indexes: name = index_d['name'] columns = index_d['column_names'] -- cgit v1.2.1 From fbd2d70a5cfd7b5c219c51cb5b7866c4ab89cece Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Wed, 3 Sep 2014 16:38:43 -0400 Subject: Fixing some pep8s and adding get_foreign_tables. --- lib/sqlalchemy/dialects/postgresql/base.py | 22 ++++++++++++++++++++-- lib/sqlalchemy/engine/interfaces.py | 6 ++++++ test/dialect/postgresql/test_reflection.py | 17 +++++++++++------ test/requirements.py | 2 +- 4 files changed, 38 insertions(+), 9 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 40b2f60ae..69ae6cfed 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2086,7 +2086,7 @@ class PGDialect(default.DefaultDialect): s = """ SELECT relname FROM pg_class c - WHERE relkind = 'v' + WHERE relkind IN ('m', 'v') AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) @@ -2098,6 +2098,24 @@ class PGDialect(default.DefaultDialect): view_names = [row[0] for row in connection.execute(s)] return view_names + @reflection.cache + def get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + 
+ result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + @reflection.cache def get_view_definition(self, connection, view_name, schema=None, **kw): if schema is not None: @@ -2434,7 +2452,7 @@ class PGDialect(default.DefaultDialect): # cast indkey as varchar since it's an int2vector, # returned as a list by some drivers such as pypostgresql - + IDX_SQL = """ SELECT i.relname as relname, diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 71df29cac..e1e346850 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -319,6 +319,12 @@ class Dialect(object): raise NotImplementedError() + def get_foreign_tables(self, connection, view_name, schema=None, **kw): + """Return a list of foreign table names for `schema`.""" + """ + + raise NotImplementedError() + def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. 
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 405ac5921..567aec927 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -13,7 +13,7 @@ import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql -class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): +class AltRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): """Test reflection on materialized views and foreign tables""" __requires__ = 'postgresql_test_dblink', @@ -23,7 +23,8 @@ class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults) @classmethod def setup_class(cls): from sqlalchemy.testing import config - cls.dblink = config.file_config.get('sqla_testing', 'postgres_test_db_link') + cls.dblink = config.file_config.get('sqla_testing', + 'postgres_test_db_link') metadata = MetaData(testing.db) testtable = Table( @@ -39,8 +40,10 @@ class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults) for ddl in \ "CREATE MATERIALIZED VIEW test_mview AS SELECT * FROM testtable;", \ - "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw OPTIONS (dbname 'test', host '%s');" % cls.dblink, \ - "CREATE USER MAPPING FOR public SERVER test_server options (user 'scott', password 'tiger');", \ + "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw \ + OPTIONS (dbname 'test', host '%s');" % cls.dblink, \ + "CREATE USER MAPPING FOR public \ + SERVER test_server options (user 'scott', password 'tiger');", \ "CREATE FOREIGN TABLE test_foreigntable ( \ id INT, \ data VARCHAR(30) \ @@ -63,7 +66,8 @@ class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults) def test_mview_is_reflected(self): metadata = MetaData(testing.db) table = Table('test_mview', metadata, autoload=True) - eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected mview didn't equal expected 
columns") + eq_(set(table.columns.keys()), set(['id', 'data']), + "Columns of reflected mview didn't equal expected columns") def test_mview_select(self): metadata = MetaData(testing.db) @@ -75,7 +79,8 @@ class AlternateRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults) def test_foreign_table_is_reflected(self): metadata = MetaData(testing.db) table = Table('test_foreigntable', metadata, autoload=True) - eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected foreign table didn't equal expected columns") + eq_(set(table.columns.keys()), set(['id', 'data']), + "Columns of reflected foreign table didn't equal expected columns") def test_foreign_table_select(self): metadata = MetaData(testing.db) diff --git a/test/requirements.py b/test/requirements.py index 46c19389a..14bb25691 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -712,7 +712,7 @@ class DefaultRequirements(SuiteRequirements): 'sqla_testing', 'postgres_test_db_link'), "postgres_test_db_link option not specified in config" ) - + @property def percent_schema_names(self): return skip_if( -- cgit v1.2.1 From 619b0be0ce05c394613d8565c08c09cac10cdd88 Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Fri, 5 Sep 2014 13:37:32 -0400 Subject: Added get_foreign_table_names to interface and put it in the test requirements. 
--- lib/sqlalchemy/dialects/postgresql/base.py | 1 + lib/sqlalchemy/engine/default.py | 1 + lib/sqlalchemy/engine/interfaces.py | 4 ++-- lib/sqlalchemy/testing/requirements.py | 15 +++++++++++++++ 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 69ae6cfed..f65bc2473 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1753,6 +1753,7 @@ class PGDialect(default.DefaultDialect): supports_default_values = True supports_empty_insert = False + supports_foreign_tables = True supports_multivalues_insert = True default_paramstyle = 'pyformat' ischema_names = ischema_names diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 2fece76b9..a59efd301 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -107,6 +107,7 @@ class DefaultDialect(interfaces.Dialect): default_paramstyle = 'named' supports_default_values = False supports_empty_insert = True + supports_foreign_tables = False supports_multivalues_insert = False server_version_info = None diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index e1e346850..795be8273 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -319,8 +319,8 @@ class Dialect(object): raise NotImplementedError() - def get_foreign_tables(self, connection, view_name, schema=None, **kw): - """Return a list of foreign table names for `schema`.""" + def get_foreign_table_names(self, connection, schema=None, **kw): + """Return a list of foreign table names for `schema`. 
""" raise NotImplementedError() diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index a04bcbbdd..f0f0e9e47 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -252,6 +252,15 @@ class SuiteRequirements(Requirements): return exclusions.closed() + @property + def foreign_tables(self): + """target platform supports FOREIGN TABLEs.""" + + return exclusions.only_if( + lambda config: config.db.dialect.supports_foreign_tables, + "%(database)s %(does_support)s 'FOREIGN TABLEs'" + ) + @property def schemas(self): """Target database must support external schemas, and have one @@ -301,6 +310,12 @@ class SuiteRequirements(Requirements): """ return self.views + @property + def foreign_table_reflection(self): + """target database must support inspection of the full CREATE FOREIGN TABLE definition. + """ + return self.foreign_tables + @property def schema_reflection(self): return self.schemas -- cgit v1.2.1 From fd2faa9bc2c6d2d1b0b8e1738f0bce21e2527bb0 Mon Sep 17 00:00:00 2001 From: Rodrigo Menezes Date: Fri, 5 Sep 2014 13:54:48 -0400 Subject: Added documentation. Changed my mind - added get_foreign_table_names() only to PGInspect and not in the Dialect. Added tests for PGInspect and removed a bunch of the old test scaffolding. 
--- lib/sqlalchemy/dialects/postgresql/base.py | 36 ++++++++++++++---------------- lib/sqlalchemy/engine/default.py | 1 - lib/sqlalchemy/engine/interfaces.py | 6 ----- lib/sqlalchemy/engine/reflection.py | 3 +++ lib/sqlalchemy/testing/requirements.py | 15 ------------- test/dialect/postgresql/test_reflection.py | 6 +++++ 6 files changed, 26 insertions(+), 41 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index f65bc2473..63f6eb891 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1679,6 +1679,23 @@ class PGInspector(reflection.Inspector): schema = schema or self.default_schema_name return self.dialect._load_enums(self.bind, schema) + def get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + class CreateEnumType(schema._CreateDropBase): __visit_name__ = "create_enum_type" @@ -1753,7 +1770,6 @@ class PGDialect(default.DefaultDialect): supports_default_values = True supports_empty_insert = False - supports_foreign_tables = True supports_multivalues_insert = True default_paramstyle = 'pyformat' ischema_names = ischema_names @@ -2099,24 +2115,6 @@ class PGDialect(default.DefaultDialect): view_names = [row[0] for row in connection.execute(s)] return view_names - @reflection.cache - def get_foreign_table_names(self, connection, schema=None, **kw): - if schema is not None: - current_schema = schema - else: - current_schema = self.default_schema_name - - result = connection.execute( - sql.text("SELECT relname FROM pg_class c " - "WHERE relkind = 'f' " 
- "AND '%s' = (select nspname from pg_namespace n " - "where n.oid = c.relnamespace) " % - current_schema, - typemap={'relname': sqltypes.Unicode} - ) - ) - return [row[0] for row in result] - @reflection.cache def get_view_definition(self, connection, view_name, schema=None, **kw): if schema is not None: diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index a59efd301..2fece76b9 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -107,7 +107,6 @@ class DefaultDialect(interfaces.Dialect): default_paramstyle = 'named' supports_default_values = False supports_empty_insert = True - supports_foreign_tables = False supports_multivalues_insert = False server_version_info = None diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 795be8273..71df29cac 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -319,12 +319,6 @@ class Dialect(object): raise NotImplementedError() - def get_foreign_table_names(self, connection, schema=None, **kw): - """Return a list of foreign table names for `schema`. - """ - - raise NotImplementedError() - def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 012d1d35d..f41ea1fa2 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -227,6 +227,9 @@ class Inspector(object): :param schema: Optional, retrieve names from a non-default schema. For special quoting, use :class:`.quoted_name`. + .. versionchanged:: 1.0.0 now returns materialized views as well + as normal views. 
+ """ return self.dialect.get_view_names(self.bind, schema, diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index f0f0e9e47..a04bcbbdd 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -252,15 +252,6 @@ class SuiteRequirements(Requirements): return exclusions.closed() - @property - def foreign_tables(self): - """target platform supports FOREIGN TABLEs.""" - - return exclusions.only_if( - lambda config: config.db.dialect.supports_foreign_tables, - "%(database)s %(does_support)s 'FOREIGN TABLEs'" - ) - @property def schemas(self): """Target database must support external schemas, and have one @@ -310,12 +301,6 @@ class SuiteRequirements(Requirements): """ return self.views - @property - def foreign_table_reflection(self): - """target database must support inspection of the full CREATE FOREIGN TABLE definition. - """ - return self.foreign_tables - @property def schema_reflection(self): return self.schemas diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 567aec927..3bc4cd715 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -107,6 +107,12 @@ class AltRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): (89, 'd1',) ] + def test_get_foreign_table_names(self): + inspector = inspect(testing.db) + connection = testing.db.connect() + ft_names = inspector.get_foreign_table_names(connection) + assert u'test_foreigntable' in ft_names + class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): """Test PostgreSQL domains""" -- cgit v1.2.1 From a23264e1dc43b1250b9b5de541ff27bd49a2b2c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Wed, 10 Sep 2014 11:33:49 +0300 Subject: tests for FILTER (WHERE ...) 
--- test/sql/test_compiler.py | 64 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index d47b58f1f..6e730ad50 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -2190,6 +2190,70 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "(ORDER BY mytable.myid + :myid_1) AS anon_1 FROM mytable" ) + def test_aggregate_filter(self): + self.assert_compile( + func.count(1).filter(), + "count(:param_1)" + ) + self.assert_compile( + func.count(1).filter( + table1.c.name != None + ), + "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)" + ) + self.assert_compile( + func.count(1).filter( + table1.c.name == None, + table1.c.myid > 0 + ), + "count(:param_1) FILTER (WHERE mytable.name IS NULL AND " + "mytable.myid > :myid_1)" + ) + + self.assert_compile( + select([func.count(1).filter( + table1.c.description != None + ).label('foo')]), + "SELECT count(:param_1) FILTER (WHERE mytable.description " + "IS NOT NULL) AS foo FROM mytable" + ) + + # test from_obj generation. 
+ # from func: + self.assert_compile( + select([ + func.max(table1.c.name).filter( + literal_column('description') != None + ) + ]), + "SELECT max(mytable.name) FILTER (WHERE description " + "IS NOT NULL) AS anon_1 FROM mytable" + ) + # from criterion: + self.assert_compile( + select([ + func.count(1).filter( + table1.c.name == 'name' + ) + ]), + "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) " + "AS anon_1 FROM mytable" + ) + + # test chaining: + self.assert_compile( + select([ + func.count(1).filter( + table1.c.name == 'name' + ).filter( + table1.c.description == 'description' + ) + ]), + "SELECT count(:param_1) FILTER (WHERE " + "mytable.name = :name_1 AND mytable.description = :description_1) " + "AS anon_1 FROM mytable" + ) + def test_date_between(self): import datetime table = Table('dt', metadata, -- cgit v1.2.1 From ad82849bbe4ef329129204d02781f737c0c79fcb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Wed, 10 Sep 2014 11:34:33 +0300 Subject: implementation for FILTER (WHERE ...) 
--- lib/sqlalchemy/__init__.py | 1 + lib/sqlalchemy/sql/__init__.py | 1 + lib/sqlalchemy/sql/compiler.py | 6 ++++ lib/sqlalchemy/sql/elements.py | 65 ++++++++++++++++++++++++++++++++++++++++ lib/sqlalchemy/sql/expression.py | 4 ++- lib/sqlalchemy/sql/functions.py | 24 ++++++++++++++- 6 files changed, 99 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 853566172..1af0de3ba 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py @@ -7,6 +7,7 @@ from .sql import ( + aggregatefilter, alias, and_, asc, diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index 4d013859c..8fbf1b536 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ b/lib/sqlalchemy/sql/__init__.py @@ -19,6 +19,7 @@ from .expression import ( Selectable, TableClause, Update, + aggregatefilter, alias, and_, asc, diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 5149fa4fe..6ebd61e9c 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -760,6 +760,12 @@ class SQLCompiler(Compiled): ) ) + def visit_aggregatefilter(self, aggregatefilter, **kwargs): + return "%s FILTER (WHERE %s)" % ( + aggregatefilter.func._compiler_dispatch(self, **kwargs), + aggregatefilter.criterion._compiler_dispatch(self, **kwargs) + ) + def visit_extract(self, extract, **kwargs): field = self.extract_map.get(extract.field, extract.field) return "EXTRACT(%s FROM %s)" % ( diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 8ec0aa700..5562e80d7 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2888,6 +2888,71 @@ class Over(ColumnElement): )) +class AggregateFilter(ColumnElement): + """Represent an aggregate FILTER clause. + + This is a special operator against aggregate functions, + which controls which rows are passed to it. + It's supported only by certain database backends. 
+ + """ + __visit_name__ = 'aggregatefilter' + + criterion = None + + def __init__(self, func, *criterion): + """Produce an :class:`.AggregateFilter` object against a function. + + Used against aggregate functions, + for database backends that support aggregate "FILTER" clause. + + E.g.:: + + from sqlalchemy import aggregatefilter + aggregatefilter(func.count(1), MyClass.name == 'some name') + + Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". + + This function is also available from the :data:`~.expression.func` + construct itself via the :meth:`.FunctionElement.filter` method. + + """ + self.func = func + self.filter(*criterion) + + def filter(self, *criterion): + for criterion in list(criterion): + criterion = _expression_literal_as_text(criterion) + + if self.criterion is not None: + self.criterion = self.criterion & criterion + else: + self.criterion = criterion + + return self + + @util.memoized_property + def type(self): + return self.func.type + + def get_children(self, **kwargs): + return [c for c in + (self.func, self.criterion) + if c is not None] + + def _copy_internals(self, clone=_clone, **kw): + self.func = clone(self.func, **kw) + if self.criterion is not None: + self.criterion = clone(self.criterion, **kw) + + @property + def _from_objects(self): + return list(itertools.chain( + *[c._from_objects for c in (self.func, self.criterion) + if c is not None] + )) + + class Label(ColumnElement): """Represents a column label (AS). 
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index d96f048b9..7b22cab3e 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -36,7 +36,7 @@ from .elements import ClauseElement, ColumnElement,\ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ Grouping, not_, \ collate, literal_column, between,\ - literal, outparam, type_coerce, ClauseList + literal, outparam, type_coerce, ClauseList, AggregateFilter from .elements import SavepointClause, RollbackToSavepointClause, \ ReleaseSavepointClause @@ -97,6 +97,8 @@ outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") insert = public_factory(Insert, ".expression.insert") update = public_factory(Update, ".expression.update") delete = public_factory(Delete, ".expression.delete") +aggregatefilter = public_factory( + AggregateFilter, ".expression.aggregatefilter") # internal functions still being called from tests and the ORM, diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 7efb1e916..46f3e27dc 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -12,7 +12,7 @@ from . import sqltypes, schema from .base import Executable, ColumnCollection from .elements import ClauseList, Cast, Extract, _literal_as_binds, \ literal_column, _type_from_args, ColumnElement, _clone,\ - Over, BindParameter + Over, BindParameter, AggregateFilter from .selectable import FromClause, Select, Alias from . import operators @@ -116,6 +116,28 @@ class FunctionElement(Executable, ColumnElement, FromClause): """ return Over(self, partition_by=partition_by, order_by=order_by) + def filter(self, *criterion): + """Produce a FILTER clause against this function. + + Used against aggregate functions, + for database backends that support aggregate "FILTER" clause. 
+ + The expression:: + + func.count(1).filter(True) + + is shorthand for:: + + from sqlalchemy import aggregatefilter + aggregatefilter(func.count(1), True) + + See :func:`~.expression.aggregatefilter` for a full description. + + """ + if not criterion: + return self + return AggregateFilter(self, *criterion) + @property def _from_objects(self): return self.clauses._from_objects -- cgit v1.2.1 From a3cd517c95e5f341c711c41474de14db97bb7778 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Wed, 10 Sep 2014 12:07:38 +0300 Subject: add ClauseTest for aggregatefilter --- test/sql/test_generative.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py index 013ba8082..1b67ab68c 100644 --- a/test/sql/test_generative.py +++ b/test/sql/test_generative.py @@ -539,6 +539,11 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): expr2 = CloningVisitor().traverse(expr) assert str(expr) == str(expr2) + def test_aggregatefilter(self): + expr = func.count(1).filter(t1.c.col1 > 1) + expr2 = CloningVisitor().traverse(expr) + assert str(expr) == str(expr2) + def test_adapt_union(self): u = union( t1.select().where(t1.c.col1 == 4), -- cgit v1.2.1 From ab1c25266dd49f087b5fff316b6ba6fb610b1d35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Thu, 11 Sep 2014 15:29:33 +0300 Subject: renamed aggregatefilter to funcfilter, since it is that --- lib/sqlalchemy/__init__.py | 2 +- lib/sqlalchemy/sql/__init__.py | 2 +- lib/sqlalchemy/sql/compiler.py | 6 +++--- lib/sqlalchemy/sql/elements.py | 18 +++++++++--------- lib/sqlalchemy/sql/expression.py | 6 +++--- lib/sqlalchemy/sql/functions.py | 14 +++++++------- test/sql/test_compiler.py | 2 +- test/sql/test_generative.py | 2 +- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py index 1af0de3ba..d184e1fbf 100644 --- a/lib/sqlalchemy/__init__.py +++ b/lib/sqlalchemy/__init__.py 
@@ -7,7 +7,6 @@ from .sql import ( - aggregatefilter, alias, and_, asc, @@ -26,6 +25,7 @@ from .sql import ( extract, false, func, + funcfilter, insert, intersect, intersect_all, diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index 8fbf1b536..351e08d0b 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ b/lib/sqlalchemy/sql/__init__.py @@ -19,7 +19,6 @@ from .expression import ( Selectable, TableClause, Update, - aggregatefilter, alias, and_, asc, @@ -39,6 +38,7 @@ from .expression import ( false, False_, func, + funcfilter, insert, intersect, intersect_all, diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 6ebd61e9c..d59012d12 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -760,10 +760,10 @@ class SQLCompiler(Compiled): ) ) - def visit_aggregatefilter(self, aggregatefilter, **kwargs): + def visit_funcfilter(self, funcfilter, **kwargs): return "%s FILTER (WHERE %s)" % ( - aggregatefilter.func._compiler_dispatch(self, **kwargs), - aggregatefilter.criterion._compiler_dispatch(self, **kwargs) + funcfilter.func._compiler_dispatch(self, **kwargs), + funcfilter.criterion._compiler_dispatch(self, **kwargs) ) def visit_extract(self, extract, **kwargs): diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 5562e80d7..5ac16ab7a 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2888,28 +2888,28 @@ class Over(ColumnElement): )) -class AggregateFilter(ColumnElement): - """Represent an aggregate FILTER clause. +class FunctionFilter(ColumnElement): + """Represent a function FILTER clause. - This is a special operator against aggregate functions, + This is a special operator against aggregate and window functions, which controls which rows are passed to it. It's supported only by certain database backends. 
""" - __visit_name__ = 'aggregatefilter' + __visit_name__ = 'funcfilter' criterion = None def __init__(self, func, *criterion): - """Produce an :class:`.AggregateFilter` object against a function. + """Produce an :class:`.FunctionFilter` object against a function. - Used against aggregate functions, - for database backends that support aggregate "FILTER" clause. + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. E.g.:: - from sqlalchemy import aggregatefilter - aggregatefilter(func.count(1), MyClass.name == 'some name') + from sqlalchemy import funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index 7b22cab3e..2e10b7370 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -36,7 +36,7 @@ from .elements import ClauseElement, ColumnElement,\ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ Grouping, not_, \ collate, literal_column, between,\ - literal, outparam, type_coerce, ClauseList, AggregateFilter + literal, outparam, type_coerce, ClauseList, FunctionFilter from .elements import SavepointClause, RollbackToSavepointClause, \ ReleaseSavepointClause @@ -97,8 +97,8 @@ outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") insert = public_factory(Insert, ".expression.insert") update = public_factory(Update, ".expression.update") delete = public_factory(Delete, ".expression.delete") -aggregatefilter = public_factory( - AggregateFilter, ".expression.aggregatefilter") +funcfilter = public_factory( + FunctionFilter, ".expression.funcfilter") # internal functions still being called from tests and the ORM, diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index 46f3e27dc..a07eca8c6 100644 --- a/lib/sqlalchemy/sql/functions.py +++ 
b/lib/sqlalchemy/sql/functions.py @@ -12,7 +12,7 @@ from . import sqltypes, schema from .base import Executable, ColumnCollection from .elements import ClauseList, Cast, Extract, _literal_as_binds, \ literal_column, _type_from_args, ColumnElement, _clone,\ - Over, BindParameter, AggregateFilter + Over, BindParameter, FunctionFilter from .selectable import FromClause, Select, Alias from . import operators @@ -119,8 +119,8 @@ class FunctionElement(Executable, ColumnElement, FromClause): def filter(self, *criterion): """Produce a FILTER clause against this function. - Used against aggregate functions, - for database backends that support aggregate "FILTER" clause. + Used against aggregate and window functions, + for database backends that support the "FILTER" clause. The expression:: @@ -128,15 +128,15 @@ class FunctionElement(Executable, ColumnElement, FromClause): is shorthand for:: - from sqlalchemy import aggregatefilter - aggregatefilter(func.count(1), True) + from sqlalchemy import funcfilter + funcfilter(func.count(1), True) - See :func:`~.expression.aggregatefilter` for a full description. + See :func:`~.expression.funcfilter` for a full description. 
""" if not criterion: return self - return AggregateFilter(self, *criterion) + return FunctionFilter(self, *criterion) @property def _from_objects(self): diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 6e730ad50..7bba29563 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -2190,7 +2190,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "(ORDER BY mytable.myid + :myid_1) AS anon_1 FROM mytable" ) - def test_aggregate_filter(self): + def test_funcfilter(self): self.assert_compile( func.count(1).filter(), "count(:param_1)" diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py index 1b67ab68c..6044cecb0 100644 --- a/test/sql/test_generative.py +++ b/test/sql/test_generative.py @@ -539,7 +539,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL): expr2 = CloningVisitor().traverse(expr) assert str(expr) == str(expr2) - def test_aggregatefilter(self): + def test_funcfilter(self): expr = func.count(1).filter(t1.c.col1 > 1) expr2 = CloningVisitor().traverse(expr) assert str(expr) == str(expr2) -- cgit v1.2.1 From 52a095ba6675f5f5807a1dc655b4ae32b9999f27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Thu, 11 Sep 2014 15:39:56 +0300 Subject: allow windowing filtered functions --- lib/sqlalchemy/sql/elements.py | 20 ++++++++++++++++++++ test/sql/test_compiler.py | 27 +++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 5ac16ab7a..62fe6553a 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2931,6 +2931,26 @@ class FunctionFilter(ColumnElement): return self + def over(self, partition_by=None, order_by=None): + """Produce an OVER clause against this filtered function. + + Used against aggregate or so-called "window" functions, + for database backends that support window functions. 
+ + The expression:: + + func.rank().filter(MyClass.y > 5).over(order_by='x') + + is shorthand for:: + + from sqlalchemy import over, funcfilter + over(funcfilter(func.rank(), MyClass.y > 5), order_by='x') + + See :func:`~.expression.over` for a full description. + + """ + return Over(self, partition_by=partition_by, order_by=order_by) + @util.memoized_property def type(self): return self.func.type diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 7bba29563..fc33db184 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -2254,6 +2254,33 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "AS anon_1 FROM mytable" ) + # test filtered windowing: + self.assert_compile( + select([ + func.rank().filter( + table1.c.name > 'foo' + ).over( + order_by=table1.c.name + ) + ]), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable" + ) + + self.assert_compile( + select([ + func.rank().filter( + table1.c.name > 'foo' + ).over( + order_by=table1.c.name, + partition_by=['description'] + ) + ]), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "OVER (PARTITION BY mytable.description ORDER BY mytable.name) " + "AS anon_1 FROM mytable" + ) + def test_date_between(self): import datetime table = Table('dt', metadata, -- cgit v1.2.1 From 89fc7d65b9ac12dd70d48c8d3be04bd50e696ce6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Thu, 11 Sep 2014 15:47:24 +0300 Subject: documentation indentation fix --- lib/sqlalchemy/sql/elements.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 62fe6553a..c1c4fc1e1 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2908,8 +2908,8 @@ class FunctionFilter(ColumnElement): E.g.:: - from sqlalchemy import funcfilter - funcfilter(func.count(1), MyClass.name == 'some name') + from sqlalchemy import 
funcfilter + funcfilter(func.count(1), MyClass.name == 'some name') Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')". -- cgit v1.2.1 From 76c06aa65345b47af38a0a1d20638dfbc890b640 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ilja=20Everil=C3=A4?= Date: Thu, 11 Sep 2014 15:49:51 +0300 Subject: method documentation typo fix --- lib/sqlalchemy/sql/elements.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index c1c4fc1e1..53838358d 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2901,7 +2901,7 @@ class FunctionFilter(ColumnElement): criterion = None def __init__(self, func, *criterion): - """Produce an :class:`.FunctionFilter` object against a function. + """Produce a :class:`.FunctionFilter` object against a function. Used against aggregate and window functions, for database backends that support the "FILTER" clause. -- cgit v1.2.1 From 98a011453537f1516def7acaa25c88ea942965ad Mon Sep 17 00:00:00 2001 From: jona Date: Fri, 12 Sep 2014 21:14:52 +0200 Subject: add failing test --- test/orm/test_cascade.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py index bd6a17286..e39911d0f 100644 --- a/test/orm/test_cascade.py +++ b/test/orm/test_cascade.py @@ -1,3 +1,4 @@ +import copy from sqlalchemy.testing import assert_raises, assert_raises_message from sqlalchemy import Integer, String, ForeignKey, Sequence, \ @@ -13,6 +14,7 @@ from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures from test.orm import _fixtures + class CascadeArgTest(fixtures.MappedTest): run_inserts = None run_create_tables = None @@ -85,6 +87,12 @@ class CascadeArgTest(fixtures.MappedTest): orm_util.CascadeOptions("all, delete-orphan"), frozenset) + def test_cascade_deepcopy(self): + old = orm_util.CascadeOptions("all, delete-orphan") + new = copy.deepcopy(old) + eq_(old, new) + + def 
test_cascade_assignable(self): User, Address = self.classes.User, self.classes.Address users, addresses = self.tables.users, self.tables.addresses -- cgit v1.2.1 From ba951b99a6f3bd78a3a90546f0ae686d31c3bfc8 Mon Sep 17 00:00:00 2001 From: jona Date: Tue, 16 Sep 2014 20:19:03 +0200 Subject: change functions --- lib/sqlalchemy/orm/util.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 734f9d5e6..3bb2685fb 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -30,13 +30,10 @@ class CascadeOptions(frozenset): 'all', 'none', 'delete-orphan']) _allowed_cascades = all_cascades - def __new__(cls, arg): - values = set([ - c for c - in re.split('\s*,\s*', arg or "") - if c - ]) - + def __new__(cls, value_list): + if isinstance(value_list, str) or value_list is None: + return cls.from_string(value_list) + values = set(value_list) if values.difference(cls._allowed_cascades): raise sa_exc.ArgumentError( "Invalid cascade option(s): %s" % @@ -70,6 +67,14 @@ class CascadeOptions(frozenset): ",".join([x for x in sorted(self)]) ) + @classmethod + def from_string(cls, arg): + values = [ + c for c + in re.split('\s*,\s*', arg or "") + if c + ] + return cls(values) def _validator_events( desc, key, validator, include_removes, include_backrefs): -- cgit v1.2.1 From 4f39e3839f8a57cceb3e959eb99b768216abecdc Mon Sep 17 00:00:00 2001 From: Jan Date: Tue, 16 Sep 2014 20:54:27 +0200 Subject: Added EXASolution dialect to documentation --- doc/build/dialects/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst index 01ff65995..7d94089a4 100644 --- a/doc/build/dialects/index.rst +++ b/doc/build/dialects/index.rst @@ -48,6 +48,7 @@ Production Ready developed jointly by IBM and SQLAlchemy developers. * `redshift-sqlalchemy `_ - driver for Amazon Redshift, adapts the existing Postgresql/psycopg2 driver. 
+* `sqlalchemy_exasol `_ - driver for EXASolution. * `sqlalchemy-sqlany `_ - driver for SAP Sybase SQL Anywhere, developed by SAP. * `sqlalchemy-monetdb `_ - driver for MonetDB. -- cgit v1.2.1 From cc3dba01db0367d4172cca1b902976ac7718e4cf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 16 Sep 2014 16:20:22 -0400 Subject: - raise from cause here to preserve stack trace --- lib/sqlalchemy/testing/exclusions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 283d89e36..49211f805 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -133,7 +133,7 @@ class compound(object): name, fail._as_string(config), str(ex)))) break else: - raise ex + util.raise_from_cause(ex) def _expect_success(self, config, name='block'): if not self.fails: -- cgit v1.2.1 From 8546153d5d0371e452e85b5f10232d75bce04976 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 16 Sep 2014 17:24:34 -0400 Subject: - break out and fix tests for materialized view and foreign tables. 
foreign tables not working --- test/dialect/postgresql/test_reflection.py | 158 ++++++++++++++++------------- 1 file changed, 86 insertions(+), 72 deletions(-) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 3bc4cd715..04ac41f1d 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -13,68 +13,37 @@ import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql -class AltRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): - """Test reflection on materialized views and foreign tables""" +class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): + """Test reflection on foreign tables""" __requires__ = 'postgresql_test_dblink', __only_on__ = 'postgresql >= 9.3' __backend__ = True @classmethod - def setup_class(cls): + def define_tables(cls, metadata): from sqlalchemy.testing import config - cls.dblink = config.file_config.get('sqla_testing', - 'postgres_test_db_link') - - metadata = MetaData(testing.db) - testtable = Table( - 'testtable', metadata, - Column( - 'id', Integer, primary_key=True), - Column( - 'data', String(30))) - metadata.create_all() - testtable.insert().execute({'id': 89, 'data': 'd1'}) - - con = testing.db.connect() - - for ddl in \ - "CREATE MATERIALIZED VIEW test_mview AS SELECT * FROM testtable;", \ - "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw \ - OPTIONS (dbname 'test', host '%s');" % cls.dblink, \ - "CREATE USER MAPPING FOR public \ - SERVER test_server options (user 'scott', password 'tiger');", \ - "CREATE FOREIGN TABLE test_foreigntable ( \ - id INT, \ - data VARCHAR(30) \ - ) SERVER test_server OPTIONS (table_name 'testtable');": - try: - con.execute(ddl) - except exc.DBAPIError as e: - if 'already exists' not in str(e): - raise e - - @classmethod - def teardown_class(cls): - con = testing.db.connect() - con.execute('DROP FOREIGN TABLE 
test_foreigntable;') - con.execute('DROP USER MAPPING FOR public SERVER test_server;') - con.execute('DROP SERVER test_server;') - con.execute('DROP MATERIALIZED VIEW test_mview;') - con.execute('DROP TABLE testtable;') - - def test_mview_is_reflected(self): - metadata = MetaData(testing.db) - table = Table('test_mview', metadata, autoload=True) - eq_(set(table.columns.keys()), set(['id', 'data']), - "Columns of reflected mview didn't equal expected columns") + dblink = config.file_config.get( + 'sqla_testing', 'postgres_test_db_link') + + for ddl in [ + "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw " + "OPTIONS (dbname 'test', host '%s')" % dblink, + "CREATE USER MAPPING FOR scott \ + SERVER test_server options (user 'scott', password 'tiger')", + "CREATE FOREIGN TABLE test_foreigntable ( " + " id INT, " + " data VARCHAR(30) " + ") SERVER test_server OPTIONS (table_name 'testtable')", + ]: + sa.event.listen(metadata, "after_create", sa.DDL(ddl)) - def test_mview_select(self): - metadata = MetaData(testing.db) - table = Table('test_mview', metadata, autoload=True) - assert table.select().execute().fetchall() == [ - (89, 'd1',) - ] + for ddl in [ + 'DROP FOREIGN TABLE test_foreigntable', + 'DROP USER MAPPING FOR scott SERVER test_server', + "DROP SERVER test_server" + ]: + sa.event.listen(metadata, "before_drop", sa.DDL(ddl)) def test_foreign_table_is_reflected(self): metadata = MetaData(testing.db) @@ -85,33 +54,78 @@ class AltRelkindReflectionTest(fixtures.TestBase, AssertsExecutionResults): def test_foreign_table_select(self): metadata = MetaData(testing.db) table = Table('test_foreigntable', metadata, autoload=True) - assert table.select().execute().fetchall() == [ - (89, 'd1',) - ] + + with testing.db.begin() as conn: + eq_( + conn.execute(table.select()).fetchall(), + [(89, 'd1',)] + ) def test_foreign_table_roundtrip(self): metadata = MetaData(testing.db) table = Table('test_foreigntable', metadata, autoload=True) - connection = 
testing.db.connect() - trans = connection.begin() - try: - table.delete().execute() - table.insert().execute({'id': 89, 'data': 'd1'}) - trans.commit() - except: - trans.rollback() - raise + with testing.db.begin() as conn: + conn.execute(table.delete()) + conn.execute(table.insert(), {'id': 89, 'data': 'd1'}) - assert table.select().execute().fetchall() == [ - (89, 'd1',) - ] + eq_( + testing.db.execute(table.select()).fetchall(), + [(89, 'd1',)] + ) def test_get_foreign_table_names(self): inspector = inspect(testing.db) - connection = testing.db.connect() - ft_names = inspector.get_foreign_table_names(connection) - assert u'test_foreigntable' in ft_names + with testing.db.connect() as conn: + ft_names = inspector.get_foreign_table_names(conn) + eq_(ft_names, ['test_foreigntable']) + + +class MaterialiedViewReflectionTest( + fixtures.TablesTest, AssertsExecutionResults): + """Test reflection on materialized views""" + + __only_on__ = 'postgresql >= 9.3' + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + testtable = Table( + 'testtable', metadata, + Column('id', Integer, primary_key=True), + Column('data', String(30))) + + # insert data before we create the view + @sa.event.listens_for(testtable, "after_create") + def insert_data(target, connection, **kw): + connection.execute( + target.insert(), + {"id": 89, "data": 'd1'} + ) + + view = sa.DDL( + "CREATE MATERIALIZED VIEW test_mview AS " + "SELECT * FROM testtable") + + sa.event.listen(testtable, 'after_create', view) + sa.event.listen( + testtable, 'before_drop', + sa.DDL("DROP MATERIALIZED VIEW test_mview") + ) + + def test_mview_is_reflected(self): + metadata = MetaData(testing.db) + table = Table('test_mview', metadata, autoload=True) + eq_(set(table.columns.keys()), set(['id', 'data']), + "Columns of reflected mview didn't equal expected columns") + + def test_mview_select(self): + metadata = MetaData(testing.db) + table = Table('test_mview', metadata, autoload=True) + eq_( + 
table.select().execute().fetchall(), + [(89, 'd1',)] + ) class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): -- cgit v1.2.1 From a985f84ed6223e7a7348dd6126f8de92012b635f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 16 Sep 2014 17:40:06 -0400 Subject: - Fixed the version string detection in the pymssql dialect to work with Microsoft SQL Azure, which changes the word "SQL Server" to "SQL Azure". fixes #3151 --- doc/build/changelog/changelog_09.rst | 9 +++++++++ lib/sqlalchemy/dialects/mssql/pymssql.py | 2 +- test/dialect/mssql/test_engine.py | 20 ++++++++++++++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 329e054b0..91e66bded 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,15 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, mssql + :versions: 1.0.0 + :tickets: 3151 + + Fixed the version string detection in the pymssql dialect to + work with Microsoft SQL Azure, which changes the word "SQL Server" + to "SQL Azure". + .. change:: :tags: bug, orm :versions: 1.0.0 diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 8f76336ae..b5a1bc566 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -63,7 +63,7 @@ class MSDialect_pymssql(MSDialect): def _get_server_version_info(self, connection): vers = connection.scalar("select @@version") m = re.match( - r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers) + r"Microsoft .*? 
- (\d+).(\d+).(\d+).(\d+)", vers) if m: return tuple(int(x) for x in m.group(1, 2, 3, 4)) else: diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py index 8ac9c6c16..4b4780d43 100644 --- a/test/dialect/mssql/test_engine.py +++ b/test/dialect/mssql/test_engine.py @@ -7,6 +7,8 @@ from sqlalchemy.engine import url from sqlalchemy.testing import fixtures from sqlalchemy import testing from sqlalchemy.testing import assert_raises_message, assert_warnings +from sqlalchemy.testing.mock import Mock + class ParseConnectTest(fixtures.TestBase): @@ -167,3 +169,21 @@ class ParseConnectTest(fixtures.TestBase): assert_raises_message(exc.SAWarning, 'Unrecognized server version info', engine.connect) + + +class VersionDetectionTest(fixtures.TestBase): + def test_pymssql_version(self): + dialect = pymssql.MSDialect_pymssql() + + for vers in [ + "Microsoft SQL Server Blah - 11.0.9216.62", + "Microsoft SQL Server (XYZ) - 11.0.9216.62 \n" + "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation", + "Microsoft SQL Azure (RTM) - 11.0.9216.62 \n" + "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation" + ]: + conn = Mock(scalar=Mock(return_value=vers)) + eq_( + dialect._get_server_version_info(conn), + (11, 0, 9216, 62) + ) \ No newline at end of file -- cgit v1.2.1 From 9d402e204d77da680472cbfb9813e437eb187944 Mon Sep 17 00:00:00 2001 From: Johannes Erdfelt Date: Wed, 17 Sep 2014 07:52:34 -0700 Subject: Handle sqlite get_unique_constraints() call for temporary tables The sqlite get_unique_constraints() implementation did not do a union against the sqlite_temp_master table like other code does. This could result in an exception being raised if get_unique_constraints() was called against a temporary table. 
--- lib/sqlalchemy/dialects/sqlite/base.py | 28 ++++++++++++++++++---------- test/dialect/test_sqlite.py | 18 ++++++++++++++++++ 2 files changed, 36 insertions(+), 10 deletions(-) diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index af793d275..c76ef6afd 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -1097,16 +1097,24 @@ class SQLiteDialect(default.DefaultDialect): @reflection.cache def get_unique_constraints(self, connection, table_name, schema=None, **kw): - UNIQUE_SQL = """ - SELECT sql - FROM - sqlite_master - WHERE - type='table' AND - name=:table_name - """ - c = connection.execute(UNIQUE_SQL, table_name=table_name) - table_data = c.fetchone()[0] + try: + s = ("SELECT sql FROM " + " (SELECT * FROM sqlite_master UNION ALL " + " SELECT * FROM sqlite_temp_master) " + "WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + except exc.DBAPIError: + s = ("SELECT sql FROM sqlite_master WHERE name = '%s' " + "AND type = 'table'") % table_name + rs = connection.execute(s) + row = rs.fetchone() + if row is None: + # sqlite won't return the schema for the sqlite_master or + # sqlite_temp_master tables from this query. These tables + # don't have any unique constraints anyway. 
+ return [] + table_data = row[0] UNIQUE_PATTERN = 'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)' return [ diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index e77a03980..6fc644689 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -575,6 +575,24 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): finally: meta.drop_all() + def test_get_unique_constraints(self): + meta = MetaData(testing.db) + t1 = Table('foo', meta, Column('f', Integer), + UniqueConstraint('f', name='foo_f')) + t2 = Table('bar', meta, Column('b', Integer), + UniqueConstraint('b', name='bar_b'), + prefixes=['TEMPORARY']) + meta.create_all() + from sqlalchemy.engine.reflection import Inspector + try: + inspector = Inspector(testing.db) + eq_(inspector.get_unique_constraints('foo'), + [{'column_names': [u'f'], 'name': u'foo_f'}]) + eq_(inspector.get_unique_constraints('bar'), + [{'column_names': [u'b'], 'name': u'bar_b'}]) + finally: + meta.drop_all() + class SQLTest(fixtures.TestBase, AssertsCompiledSQL): -- cgit v1.2.1 From 414bc1c64d05c8a5a26043e707c9c0013100ce4b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 12:41:08 -0400 Subject: - the actual round trip requires password authent set up for the user; we don't actually need a round trip test here as we're only testing reflection. 
--- test/dialect/postgresql/test_reflection.py | 32 +++++++----------------------- 1 file changed, 7 insertions(+), 25 deletions(-) diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 04ac41f1d..4bc658694 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -26,10 +26,15 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): dblink = config.file_config.get( 'sqla_testing', 'postgres_test_db_link') + testtable = Table( + 'testtable', metadata, + Column('id', Integer, primary_key=True), + Column('data', String(30))) + for ddl in [ "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw " "OPTIONS (dbname 'test', host '%s')" % dblink, - "CREATE USER MAPPING FOR scott \ + "CREATE USER MAPPING FOR public \ SERVER test_server options (user 'scott', password 'tiger')", "CREATE FOREIGN TABLE test_foreigntable ( " " id INT, " @@ -40,7 +45,7 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): for ddl in [ 'DROP FOREIGN TABLE test_foreigntable', - 'DROP USER MAPPING FOR scott SERVER test_server', + 'DROP USER MAPPING FOR public SERVER test_server', "DROP SERVER test_server" ]: sa.event.listen(metadata, "before_drop", sa.DDL(ddl)) @@ -51,29 +56,6 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): eq_(set(table.columns.keys()), set(['id', 'data']), "Columns of reflected foreign table didn't equal expected columns") - def test_foreign_table_select(self): - metadata = MetaData(testing.db) - table = Table('test_foreigntable', metadata, autoload=True) - - with testing.db.begin() as conn: - eq_( - conn.execute(table.select()).fetchall(), - [(89, 'd1',)] - ) - - def test_foreign_table_roundtrip(self): - metadata = MetaData(testing.db) - table = Table('test_foreigntable', metadata, autoload=True) - - with testing.db.begin() as conn: - conn.execute(table.delete()) - 
conn.execute(table.insert(), {'id': 89, 'data': 'd1'}) - - eq_( - testing.db.execute(table.select()).fetchall(), - [(89, 'd1',)] - ) - def test_get_foreign_table_names(self): inspector = inspect(testing.db) with testing.db.connect() as conn: -- cgit v1.2.1 From 27617986bbeb028cd2cc0a021e20df517e12a2c5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 12:41:55 -0400 Subject: keep this off for the moment as jenkins isn't set up for this yet --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index b70086605..51a4e30bf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ oracle_db_link = test_link # CREATE EXTENSION postgres_fdw; # GRANT USAGE ON FOREIGN DATA WRAPPER postgres_fdw TO public; # this can be localhost to create a loopback foreign table -postgres_test_db_link = localhost +# postgres_test_db_link = localhost [db] -- cgit v1.2.1 From be57def4b909a447b10fff21bf957c804132b5ec Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 13:11:22 -0400 Subject: - repair get_foreign_table_names() for PGInsp/dialect level - repair get_view_names() - changelog + migration note --- doc/build/changelog/changelog_10.rst | 17 +++++++++++ doc/build/changelog/migration_10.rst | 28 ++++++++++++++++++ lib/sqlalchemy/dialects/postgresql/base.py | 47 ++++++++++++++++++++---------- lib/sqlalchemy/engine/reflection.py | 3 -- setup.cfg | 2 +- test/dialect/postgresql/test_reflection.py | 25 ++++++++++++++-- 6 files changed, 99 insertions(+), 23 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 9c7f207cc..b00dbb0a7 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,23 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. 
change:: + :tags: feature, postgresql + :tickets: 2891 + :pullreq: github:128 + + Support has been added for reflection of materialized views + and foreign tables, as well as support for materialized views + within :meth:`.Inspector.get_view_names`, and a new method + :meth:`.PGInspector.get_foreign_table_names` available on the + Postgresql version of :class:`.Inspector`. Pull request courtesy + Rodrigo Menezes. + + .. seealso:: + + :ref:`feature_2891` + + .. change:: :tags: feature, orm diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 6a48b31fa..d967afa35 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -865,6 +865,34 @@ method that returns information on all available ``ENUM`` types:: :meth:`.PGInspector.get_enums` +.. _feature_2891: + +Postgresql Dialect reflects Materialized Views, Foreign Tables +-------------------------------------------------------------- + +Changes are as follows: + +* the :class:`Table` construct with ``autoload=True`` will now match a name + that exists in the database as a materialized view or foriegn table. + +* :meth:`.Inspector.get_view_names` will return plain and materialized view + names. + +* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it + continues to return only the names of plain tables. + +* A new method :meth:`.PGInspector.get_foreign_table_names` is added which + will return the names of tables that are specifically marked as "foreign" + in the Postgresql schema tables. + +The change to reflection involves adding ``'m'`` and ``'f'`` to the list +of qualifiers we use when querying ``pg_class.relkind``, but this change +is new in 1.0.0 to avoid any backwards-incompatible surprises for those +running 0.9 in production. 
+ +:ticket:`2891` + + MySQL internal "no such table" exceptions not passed to event handlers ---------------------------------------------------------------------- diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index df9797658..b9a0d461b 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -401,6 +401,7 @@ The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX command, so it *must* be a valid index type for your version of PostgreSQL. + Special Reflection Options -------------------------- @@ -1679,22 +1680,18 @@ class PGInspector(reflection.Inspector): schema = schema or self.default_schema_name return self.dialect._load_enums(self.bind, schema) - def get_foreign_table_names(self, connection, schema=None, **kw): - if schema is not None: - current_schema = schema - else: - current_schema = self.default_schema_name + def get_foreign_table_names(self, schema=None): + """Return a list of FOREIGN TABLE names. - result = connection.execute( - sql.text("SELECT relname FROM pg_class c " - "WHERE relkind = 'f' " - "AND '%s' = (select nspname from pg_namespace n " - "where n.oid = c.relnamespace) " % - current_schema, - typemap={'relname': sqltypes.Unicode} - ) - ) - return [row[0] for row in result] + Behavior is similar to that of :meth:`.Inspector.get_table_names`, + except that the list is limited to those tables tha report a + ``relkind`` value of ``f``. + + .. 
versionadded:: 1.0.0 + + """ + schema = schema or self.default_schema_name + return self.dialect._get_foreign_table_names(self.bind, schema) class CreateEnumType(schema._CreateDropBase): @@ -2094,6 +2091,24 @@ class PGDialect(default.DefaultDialect): ) return [row[0] for row in result] + @reflection.cache + def _get_foreign_table_names(self, connection, schema=None, **kw): + if schema is not None: + current_schema = schema + else: + current_schema = self.default_schema_name + + result = connection.execute( + sql.text("SELECT relname FROM pg_class c " + "WHERE relkind = 'f' " + "AND '%s' = (select nspname from pg_namespace n " + "where n.oid = c.relnamespace) " % + current_schema, + typemap={'relname': sqltypes.Unicode} + ) + ) + return [row[0] for row in result] + @reflection.cache def get_view_names(self, connection, schema=None, **kw): if schema is not None: @@ -2103,7 +2118,7 @@ class PGDialect(default.DefaultDialect): s = """ SELECT relname FROM pg_class c - WHERE relkind IN ('m', v') + WHERE relkind IN ('m', 'v') AND '%(schema)s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) """ % dict(schema=current_schema) diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index b72290588..cf1f2d3dd 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -227,9 +227,6 @@ class Inspector(object): :param schema: Optional, retrieve names from a non-default schema. For special quoting, use :class:`.quoted_name`. - .. versionchanged:: 1.0.0 now returns materialized views as well - as normal views. 
- """ return self.dialect.get_view_names(self.bind, schema, diff --git a/setup.cfg b/setup.cfg index 51a4e30bf..b70086605 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ oracle_db_link = test_link # CREATE EXTENSION postgres_fdw; # GRANT USAGE ON FOREIGN DATA WRAPPER postgres_fdw TO public; # this can be localhost to create a loopback foreign table -# postgres_test_db_link = localhost +postgres_test_db_link = localhost [db] diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 4bc658694..b8b9be3de 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -59,9 +59,15 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): def test_get_foreign_table_names(self): inspector = inspect(testing.db) with testing.db.connect() as conn: - ft_names = inspector.get_foreign_table_names(conn) + ft_names = inspector.get_foreign_table_names() eq_(ft_names, ['test_foreigntable']) + def test_get_table_names_no_foreign(self): + inspector = inspect(testing.db) + with testing.db.connect() as conn: + names = inspector.get_table_names() + eq_(names, ['testtable']) + class MaterialiedViewReflectionTest( fixtures.TablesTest, AssertsExecutionResults): @@ -85,15 +91,24 @@ class MaterialiedViewReflectionTest( {"id": 89, "data": 'd1'} ) - view = sa.DDL( + materialized_view = sa.DDL( "CREATE MATERIALIZED VIEW test_mview AS " "SELECT * FROM testtable") - sa.event.listen(testtable, 'after_create', view) + plain_view = sa.DDL( + "CREATE VIEW test_regview AS " + "SELECT * FROM testtable") + + sa.event.listen(testtable, 'after_create', plain_view) + sa.event.listen(testtable, 'after_create', materialized_view) sa.event.listen( testtable, 'before_drop', sa.DDL("DROP MATERIALIZED VIEW test_mview") ) + sa.event.listen( + testtable, 'before_drop', + sa.DDL("DROP VIEW test_regview") + ) def test_mview_is_reflected(self): metadata = MetaData(testing.db) @@ -109,6 
+124,10 @@ class MaterialiedViewReflectionTest( [(89, 'd1',)] ) + def test_get_view_names(self): + insp = inspect(testing.db) + eq_(set(insp.get_view_names()), set(['test_mview', 'test_regview'])) + class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): """Test PostgreSQL domains""" -- cgit v1.2.1 From 4a4cbe5ff2e988ddaa0dff6a53bd09598834af89 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 13:12:41 -0400 Subject: - comment this out (again) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index b70086605..51a4e30bf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,7 +31,7 @@ oracle_db_link = test_link # CREATE EXTENSION postgres_fdw; # GRANT USAGE ON FOREIGN DATA WRAPPER postgres_fdw TO public; # this can be localhost to create a loopback foreign table -postgres_test_db_link = localhost +# postgres_test_db_link = localhost [db] -- cgit v1.2.1 From 1217d6ce97bd469b3ec2c17f6f955730059d571f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 14:02:34 -0400 Subject: - move this to the correct location --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 5ed71e872..e81ae4697 100644 --- a/tox.ini +++ b/tox.ini @@ -29,11 +29,11 @@ commands= [testenv:pep8] +deps=flake8 commands = python -m flake8 {posargs} [flake8] -deps=flake8 show-source = True ignore = E711,E712,E721,F841,F811 exclude=.venv,.git,.tox,dist,doc,*egg,build -- cgit v1.2.1 From cb23fa243f5138aac7acb2a134d567f1a297d42e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 15:15:21 -0400 Subject: - Added :meth:`.Inspector.get_temp_table_names` and :meth:`.Inspector.get_temp_view_names`; currently, only the SQLite dialect supports these methods. 
The return of temporary table and view names has been **removed** from SQLite's version of :meth:`.Inspector.get_table_names` and :meth:`.Inspector.get_view_names`; other database backends cannot support this information (such as MySQL), and the scope of operation is different in that the tables can be local to a session and typically aren't supported in remote schemas. fixes #3204 --- doc/build/changelog/changelog_10.rst | 18 ++++++ doc/build/changelog/migration_10.rst | 20 +++++++ lib/sqlalchemy/dialects/sqlite/base.py | 49 ++++++++-------- lib/sqlalchemy/engine/interfaces.py | 18 +++++- lib/sqlalchemy/engine/reflection.py | 24 ++++++++ lib/sqlalchemy/testing/requirements.py | 14 +++++ lib/sqlalchemy/testing/suite/test_reflection.py | 78 +++++++++++++++++++++++++ test/dialect/test_sqlite.py | 17 ------ test/requirements.py | 11 ++++ 9 files changed, 205 insertions(+), 44 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index b00dbb0a7..7bfcb574e 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,24 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, sqlite + :tickets: 3204 + + Added :meth:`.Inspector.get_temp_table_names` and + :meth:`.Inspector.get_temp_view_names`; currently, only the + SQLite dialect supports these methods. The return of temporary + table and view names has been **removed** from SQLite's version + of :meth:`.Inspector.get_table_names` and + :meth:`.Inspector.get_view_names`; other database backends cannot + support this information (such as MySQL), and the scope of operation + is different in that the tables can be local to a session and + typically aren't supported in remote schemas. + + .. seealso:: + + :ref:`change_3204` + .. 
change:: :tags: feature, postgresql :tickets: 2891 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index d967afa35..246eb9a14 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -953,6 +953,26 @@ when using ODBC to avoid this issue entirely. :ticket:`3182` +.. _change_3204: + +SQLite has distinct methods for temporary table/view name reporting +------------------------------------------------------------------- + +The :meth:`.Inspector.get_table_names` and :meth:`.Inspector.get_view_names` +methods in the case of SQLite would also return the names of temporary +tables and views, which is not provided by any other dialect (in the case +of MySQL at least it is not even possible). This logic has been moved +out to two new methods :meth:`.Inspector.get_temp_table_names` and +:meth:`.Inspector.get_temp_view_names`. + +Note that reflection of a specific named temporary table or temporary view, +either by ``Table('name', autoload=True)`` or via methods like +:meth:`.Inspector.get_columns` continues to function for most if not all +dialects. For SQLite specifically, there is a bug fix for UNIQUE constraint +reflection from temp tables as well, which is :ticket:`3203`. + +:ticket:`3204` + .. 
_change_2984: Drizzle Dialect is now an External Dialect diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index af793d275..b0bf670a6 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -829,20 +829,26 @@ class SQLiteDialect(default.DefaultDialect): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='table' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) - except exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='table' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='table' ORDER BY name") % (master,) + rs = connection.execute(s) + return [row[0] for row in rs] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='table' ORDER BY name " + rs = connection.execute(s) + + return [row[0] for row in rs] + + @reflection.cache + def get_temp_view_names(self, connection, **kw): + s = "SELECT name FROM sqlite_temp_master "\ + "WHERE type='view' ORDER BY name " + rs = connection.execute(s) return [row[0] for row in rs] @@ -869,20 +875,11 @@ class SQLiteDialect(default.DefaultDialect): if schema is not None: qschema = self.identifier_preparer.quote_identifier(schema) master = '%s.sqlite_master' % qschema - s = ("SELECT name FROM %s " - "WHERE type='view' ORDER BY name") % (master,) - rs = connection.execute(s) else: - try: - s = ("SELECT name FROM " - " (SELECT * FROM sqlite_master UNION ALL " - " SELECT * FROM sqlite_temp_master) " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) - except 
exc.DBAPIError: - s = ("SELECT name FROM sqlite_master " - "WHERE type='view' ORDER BY name") - rs = connection.execute(s) + master = "sqlite_master" + s = ("SELECT name FROM %s " + "WHERE type='view' ORDER BY name") % (master,) + rs = connection.execute(s) return [row[0] for row in rs] diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 71df29cac..0ad2efae0 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -308,7 +308,15 @@ class Dialect(object): def get_table_names(self, connection, schema=None, **kw): """Return a list of table names for `schema`.""" - raise NotImplementedError + raise NotImplementedError() + + def get_temp_table_names(self, connection, schema=None, **kw): + """Return a list of temporary table names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() def get_view_names(self, connection, schema=None, **kw): """Return a list of all view names available in the database. @@ -319,6 +327,14 @@ class Dialect(object): raise NotImplementedError() + def get_temp_view_names(self, connection, schema=None, **kw): + """Return a list of temporary view names on the given connection, + if supported by the underlying backend. + + """ + + raise NotImplementedError() + def get_view_definition(self, connection, view_name, schema=None, **kw): """Return view definition. diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index cf1f2d3dd..c0a3240a5 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -201,6 +201,30 @@ class Inspector(object): tnames = list(topological.sort(tuples, tnames)) return tnames + def get_temp_table_names(self): + """return a list of temporary table names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. 
versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_table_names( + self.bind, info_cache=self.info_cache) + + def get_temp_view_names(self): + """return a list of temporary view names for the current bind. + + This method is unsupported by most dialects; currently + only SQLite implements it. + + .. versionadded:: 1.0.0 + + """ + return self.dialect.get_temp_view_names( + self.bind, info_cache=self.info_cache) + def get_table_options(self, table_name, schema=None, **kw): """Return a dictionary of options specified when the table of the given name was created. diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index a04bcbbdd..da3e3128a 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -313,6 +313,20 @@ class SuiteRequirements(Requirements): def foreign_key_constraint_reflection(self): return exclusions.open() + @property + def temp_table_reflection(self): + return exclusions.open() + + @property + def temp_table_names(self): + """target dialect supports listing of temporary table names""" + return exclusions.closed() + + @property + def temporary_views(self): + """target database supports temporary views""" + return exclusions.closed() + @property def index_reflection(self): return exclusions.open() diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 575a38db9..690a880bb 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -95,6 +95,27 @@ class ComponentReflectionTest(fixtures.TablesTest): cls.define_index(metadata, users) if testing.requires.view_column_reflection.enabled: cls.define_views(metadata, schema) + if not schema and testing.requires.temp_table_reflection.enabled: + cls.define_temp_tables(metadata) + + @classmethod + def define_temp_tables(cls, metadata): + temp_table = Table( + "user_tmp", metadata, + Column("id", sa.INT, 
primary_key=True), + Column('name', sa.VARCHAR(50)), + Column('foo', sa.INT), + sa.UniqueConstraint('name', name='user_tmp_uq'), + sa.Index("user_tmp_ix", "foo"), + prefixes=['TEMPORARY'] + ) + if testing.requires.view_reflection.enabled and \ + testing.requires.temporary_views.enabled: + event.listen( + temp_table, "after_create", + DDL("create temporary view user_tmp_v as " + "select * from user_tmp") + ) @classmethod def define_index(cls, metadata, users): @@ -147,6 +168,7 @@ class ComponentReflectionTest(fixtures.TablesTest): users, addresses, dingalings = self.tables.users, \ self.tables.email_addresses, self.tables.dingalings insp = inspect(meta.bind) + if table_type == 'view': table_names = insp.get_view_names(schema) table_names.sort() @@ -162,6 +184,20 @@ class ComponentReflectionTest(fixtures.TablesTest): answer = ['dingalings', 'email_addresses', 'users'] eq_(sorted(table_names), answer) + @testing.requires.temp_table_names + def test_get_temp_table_names(self): + insp = inspect(self.metadata.bind) + temp_table_names = insp.get_temp_table_names() + eq_(sorted(temp_table_names), ['user_tmp']) + + @testing.requires.view_reflection + @testing.requires.temp_table_names + @testing.requires.temporary_views + def test_get_temp_view_names(self): + insp = inspect(self.metadata.bind) + temp_table_names = insp.get_temp_view_names() + eq_(sorted(temp_table_names), ['user_tmp_v']) + @testing.requires.table_reflection def test_get_table_names(self): self._test_get_table_names() @@ -294,6 +330,28 @@ class ComponentReflectionTest(fixtures.TablesTest): def test_get_columns_with_schema(self): self._test_get_columns(schema=testing.config.test_schema) + @testing.requires.temp_table_reflection + def test_get_temp_table_columns(self): + meta = MetaData(testing.db) + user_tmp = self.tables.user_tmp + insp = inspect(meta.bind) + cols = insp.get_columns('user_tmp') + self.assert_(len(cols) > 0, len(cols)) + + for i, col in enumerate(user_tmp.columns): + eq_(col.name, 
cols[i]['name']) + + @testing.requires.temp_table_reflection + @testing.requires.view_column_reflection + @testing.requires.temporary_views + def test_get_temp_view_columns(self): + insp = inspect(self.metadata.bind) + cols = insp.get_columns('user_tmp_v') + eq_( + [col['name'] for col in cols], + ['id', 'name', 'foo'] + ) + @testing.requires.view_column_reflection def test_get_view_columns(self): self._test_get_columns(table_type='view') @@ -426,6 +484,26 @@ class ComponentReflectionTest(fixtures.TablesTest): def test_get_unique_constraints(self): self._test_get_unique_constraints() + @testing.requires.temp_table_reflection + def test_get_temp_table_unique_constraints(self): + insp = inspect(self.metadata.bind) + eq_( + insp.get_unique_constraints('user_tmp'), + [{'column_names': ['name'], 'name': 'user_tmp_uq'}] + ) + + @testing.requires.temp_table_reflection + def test_get_temp_table_indexes(self): + insp = inspect(self.metadata.bind) + indexes = insp.get_indexes('user_tmp') + eq_( + # TODO: we need to add better filtering for indexes/uq constraints + # that are doubled up + [idx for idx in indexes if idx['name'] == 'user_tmp_ix'], + [{'unique': False, 'column_names': ['foo'], 'name': 'user_tmp_ix'}] + ) + + @testing.requires.unique_constraint_reflection @testing.requires.schemas def test_get_unique_constraints_with_schema(self): diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index e77a03980..e1f6225f4 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -515,23 +515,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): finally: cx.execute('DETACH DATABASE test_schema') - @testing.exclude('sqlite', '<', (2, 6), 'no database support') - def test_temp_table_reflection(self): - cx = testing.db.connect() - try: - cx.execute('CREATE TEMPORARY TABLE tempy (id INT)') - assert 'tempy' in cx.dialect.get_table_names(cx, None) - meta = MetaData(cx) - tempy = Table('tempy', meta, autoload=True) - assert 
len(tempy.c) == 1 - meta.drop_all() - except: - try: - cx.execute('DROP TABLE tempy') - except exc.DBAPIError: - pass - raise - def test_file_path_is_absolute(self): d = pysqlite_dialect.dialect() eq_( diff --git a/test/requirements.py b/test/requirements.py index 14bb25691..cfdfc8054 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -296,6 +296,17 @@ class DefaultRequirements(SuiteRequirements): "sqlite" ) + @property + def temp_table_names(self): + """target dialect supports listing of temporary table names""" + + return only_on(['sqlite']) + + @property + def temporary_views(self): + """target database supports temporary views""" + return only_on(['sqlite', 'postgresql']) + @property def update_nowait(self): """Target database must support SELECT...FOR UPDATE NOWAIT""" -- cgit v1.2.1 From c926df36173e45cfd45807e197f4f4f63ca5128b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 15:19:19 -0400 Subject: - changelog for #3203; fixes #3203 --- doc/build/changelog/changelog_10.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 7bfcb574e..ca612c0ef 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,20 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, sqlite + :tickets: 3203 + :pullreq: bitbucket:31 + + SQLite now supports reflection of unique constraints from + temp tables; previously, this would fail with a TypeError. + Pull request courtesy Johannes Erdfelt. + + .. seealso:: + + :ref:`change_3204` - changes regarding SQLite temporary + table and view reflection. + .. 
change:: :tags: bug, sqlite :tickets: 3204 -- cgit v1.2.1 From 354e118f4be39f240a566a290794dc176cb0d52f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 15:39:47 -0400 Subject: - we can start checking these flake rules --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index e81ae4697..668590611 100644 --- a/tox.ini +++ b/tox.ini @@ -35,6 +35,6 @@ commands = python -m flake8 {posargs} [flake8] show-source = True -ignore = E711,E712,E721,F841,F811 +ignore = E711,E712,E721 exclude=.venv,.git,.tox,dist,doc,*egg,build -- cgit v1.2.1 From 1f2f88d8ffaac5ae98de097e548e205778686cd5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 15:40:09 -0400 Subject: - not sure what this is testing but remove the self.l that might be contributing to pypy not cleaning up on this one --- test/orm/test_eager_relations.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index 214b592b5..4c6d9bbe1 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -1253,8 +1253,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL): orders=relationship(Order, lazy=False, order_by=orders.c.id), )) q = create_session().query(User) - self.l = q.all() - eq_(self.static.user_all_result, q.order_by(User.id).all()) + def go(): + eq_(self.static.user_all_result, q.order_by(User.id).all()) + self.assert_sql_count(testing.db, go, 1) def test_against_select(self): """test eager loading of a mapper which is against a select""" -- cgit v1.2.1 From 7fa21b22989f6d53ff70a8df71fc6d210c556e07 Mon Sep 17 00:00:00 2001 From: Johannes Erdfelt Date: Wed, 10 Sep 2014 07:37:59 -0700 Subject: Reflect unique constraints when reflecting a Table object Calls to reflect a table did not create any UniqueConstraint objects. 
The reflection core made no calls to get_unique_constraints and as a result, the sqlite dialect would never reflect any unique constraints. MySQL transparently converts unique constraints into unique indexes, but SQLAlchemy would reflect those as an Index object and as a UniqueConstraint. The reflection core will now deduplicate the unique constraints. PostgreSQL would reflect unique constraints as an Index object and as a UniqueConstraint object. The reflection core will now deduplicate the unique indexes. --- lib/sqlalchemy/dialects/mysql/base.py | 3 +- lib/sqlalchemy/dialects/postgresql/base.py | 18 ++++++++---- lib/sqlalchemy/engine/reflection.py | 34 +++++++++++++++++++++++ lib/sqlalchemy/testing/suite/test_reflection.py | 13 ++++++--- test/dialect/mysql/test_reflection.py | 31 +++++++++++++++++++++ test/dialect/postgresql/test_reflection.py | 37 +++++++++++++++++++++++-- 6 files changed, 124 insertions(+), 12 deletions(-) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 7ccd59abb..2f85a3626 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2590,7 +2590,8 @@ class MySQLDialect(default.DefaultDialect): return [ { 'name': key['name'], - 'column_names': [col[0] for col in key['columns']] + 'column_names': [col[0] for col in key['columns']], + 'duplicates_index': key['name'], } for key in parsed_state.keys if key['type'] == 'UNIQUE' diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index b9a0d461b..556493b3c 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -2471,14 +2471,19 @@ class PGDialect(default.DefaultDialect): SELECT i.relname as relname, ix.indisunique, ix.indexprs, ix.indpred, - a.attname, a.attnum, ix.indkey%s + a.attname, a.attnum, c.conrelid, ix.indkey%s FROM pg_class t join pg_index ix on t.oid = ix.indrelid - join pg_class i on i.oid=ix.indexrelid 
+ join pg_class i on i.oid = ix.indexrelid left outer join pg_attribute a - on t.oid=a.attrelid and %s + on t.oid = a.attrelid and %s + left outer join + pg_constraint c + on (ix.indrelid = c.conrelid and + ix.indexrelid = c.conindid and + c.contype in ('p', 'u', 'x')) WHERE t.relkind IN ('r', 'v', 'f', 'm') and t.oid = :table_oid @@ -2501,7 +2506,7 @@ class PGDialect(default.DefaultDialect): sv_idx_name = None for row in c.fetchall(): - idx_name, unique, expr, prd, col, col_num, idx_key = row + idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row if expr: if idx_name != sv_idx_name: @@ -2523,11 +2528,14 @@ class PGDialect(default.DefaultDialect): index['cols'][col_num] = col index['key'] = [int(k.strip()) for k in idx_key.split()] index['unique'] = unique + index['duplicates_constraint'] = (None if conrelid is None + else idx_name) return [ {'name': name, 'unique': idx['unique'], - 'column_names': [idx['cols'][i] for i in idx['key']]} + 'column_names': [idx['cols'][i] for i in idx['key']], + 'duplicates_constraint': idx['duplicates_constraint']} for name, idx in indexes.items() ] diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index c0a3240a5..330fc2b19 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -603,12 +603,15 @@ class Inspector(object): columns = index_d['column_names'] unique = index_d['unique'] flavor = index_d.get('type', 'index') + duplicates = index_d.get('duplicates_constraint') if include_columns and \ not set(columns).issubset(include_columns): util.warn( "Omitting %s key for (%s), key covers omitted columns." 
% (flavor, ', '.join(columns))) continue + if duplicates: + continue # look for columns by orig name in cols_by_orig_name, # but support columns that are in-Python only as fallback idx_cols = [] @@ -626,3 +629,34 @@ class Inspector(object): idx_cols.append(idx_col) sa_schema.Index(name, *idx_cols, **dict(unique=unique)) + + # Unique Constraints + constraints = self.get_unique_constraints(table_name, schema) + for const_d in constraints: + conname = const_d['name'] + columns = const_d['column_names'] + duplicates = const_d.get('duplicates_index') + if include_columns and \ + not set(columns).issubset(include_columns): + util.warn( + "Omitting unique constraint key for (%s), " + "key covers omitted columns." % + ', '.join(columns)) + continue + if duplicates: + continue + # look for columns by orig name in cols_by_orig_name, + # but support columns that are in-Python only as fallback + constrained_cols = [] + for c in columns: + try: + constrained_col = cols_by_orig_name[c] \ + if c in cols_by_orig_name else table.c[c] + except KeyError: + util.warn( + "unique constraint key '%s' was not located in " + "columns for table '%s'" % (c, table_name)) + else: + constrained_cols.append(constrained_col) + table.append_constraint( + sa_schema.UniqueConstraint(*constrained_cols, name=conname)) diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 690a880bb..bd0be5738 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -487,10 +487,12 @@ class ComponentReflectionTest(fixtures.TablesTest): @testing.requires.temp_table_reflection def test_get_temp_table_unique_constraints(self): insp = inspect(self.metadata.bind) - eq_( - insp.get_unique_constraints('user_tmp'), - [{'column_names': ['name'], 'name': 'user_tmp_uq'}] - ) + reflected = insp.get_unique_constraints('user_tmp') + for refl in reflected: + # Different dialects handle duplicate index and 
constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) + eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}]) @testing.requires.temp_table_reflection def test_get_temp_table_indexes(self): @@ -544,6 +546,9 @@ class ComponentReflectionTest(fixtures.TablesTest): ) for orig, refl in zip(uniques, reflected): + # Different dialects handle duplicate index and constraints + # differently, so ignore this flag + refl.pop('duplicates_index', None) eq_(orig, refl) @testing.provide_metadata diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index bf35a2c6b..b8f72b942 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -283,6 +283,37 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults): view_names = dialect.get_view_names(connection, "information_schema") self.assert_('TABLES' in view_names) + def test_reflection_with_unique_constraint(self): + insp = inspect(testing.db) + + uc_table = Table('mysql_uc', MetaData(testing.db), + Column('a', String(10)), + UniqueConstraint('a', name='uc_a')) + + try: + uc_table.create() + + # MySQL converts unique constraints into unique indexes and + # the 0.9 API returns it as both an index and a constraint + indexes = set(i['name'] for i in insp.get_indexes('mysql_uc')) + constraints = set(i['name'] + for i in insp.get_unique_constraints('mysql_uc')) + + self.assert_('uc_a' in indexes) + self.assert_('uc_a' in constraints) + + # However, upon creating a Table object via reflection, it should + # only appear as a unique index and not a constraint + reflected = Table('mysql_uc', MetaData(testing.db), autoload=True) + + indexes = set(i.name for i in reflected.indexes) + constraints = set(uc.name for uc in reflected.constraints) + + self.assert_('uc_a' in indexes) + self.assert_('uc_a' not in constraints) + finally: + uc_table.drop() + class RawReflectionTest(fixtures.TestBase): def setup(self): diff 
--git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index b8b9be3de..fc013c72a 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -7,7 +7,7 @@ from sqlalchemy.testing import fixtures from sqlalchemy import testing from sqlalchemy import inspect from sqlalchemy import Table, Column, MetaData, Integer, String, \ - PrimaryKeyConstraint, ForeignKey, join, Sequence + PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint from sqlalchemy import exc import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql @@ -656,7 +656,8 @@ class ReflectionTest(fixtures.TestBase): conn.execute("ALTER TABLE t RENAME COLUMN x to y") ind = testing.db.dialect.get_indexes(conn, "t", None) - eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}]) + eq_(ind, [{'unique': False, 'duplicates_constraint': None, + 'column_names': ['y'], 'name': 'idx1'}]) conn.close() @testing.provide_metadata @@ -803,6 +804,38 @@ class ReflectionTest(fixtures.TestBase): 'labels': ['sad', 'ok', 'happy'] }]) + def test_reflection_with_unique_constraint(self): + insp = inspect(testing.db) + + uc_table = Table('pgsql_uc', MetaData(testing.db), + Column('a', String(10)), + UniqueConstraint('a', name='uc_a')) + + try: + uc_table.create() + + # PostgreSQL will create an implicit index for a unique + # constraint. 
As a result, the 0.9 API returns it as both + # an index and a constraint + indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc')) + constraints = set(i['name'] + for i in insp.get_unique_constraints('pgsql_uc')) + + self.assert_('uc_a' in indexes) + self.assert_('uc_a' in constraints) + + # However, upon creating a Table object via reflection, it should + # only appear as a unique constraint and not an index + reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True) + + indexes = set(i.name for i in reflected.indexes) + constraints = set(uc.name for uc in reflected.constraints) + + self.assert_('uc_a' not in indexes) + self.assert_('uc_a' in constraints) + finally: + uc_table.drop() + class CustomTypeReflectionTest(fixtures.TestBase): -- cgit v1.2.1 From e3f07f7206cf0d6a5f2ff9344a365f4657645338 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 17 Sep 2014 19:43:45 -0400 Subject: - Added support for the Oracle table option ON COMMIT. This is being kept separate from Postgresql's ON COMMIT for now even though ON COMMIT is in the SQL standard; the option is still very specific to temp tables and we eventually would provide a more first class temporary table feature. - oracle can apparently do get_temp_table_names() too, so implement that, fix its get_table_names(), and add it to #3204. fixes #3204 again. 
--- doc/build/changelog/changelog_10.rst | 11 ++++-- doc/build/changelog/migration_10.rst | 6 ++-- lib/sqlalchemy/dialects/oracle/base.py | 46 +++++++++++++++++++++++-- lib/sqlalchemy/testing/suite/test_reflection.py | 22 +++++++++--- test/dialect/test_oracle.py | 17 +++++++++ test/requirements.py | 2 +- 6 files changed, 90 insertions(+), 14 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index ca612c0ef..d6782c917 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -41,9 +41,9 @@ Added :meth:`.Inspector.get_temp_table_names` and :meth:`.Inspector.get_temp_view_names`; currently, only the - SQLite dialect supports these methods. The return of temporary - table and view names has been **removed** from SQLite's version - of :meth:`.Inspector.get_table_names` and + SQLite and Oracle dialects support these methods. The return of + temporary table and view names has been **removed** from SQLite and + Oracle's version of :meth:`.Inspector.get_table_names` and :meth:`.Inspector.get_view_names`; other database backends cannot support this information (such as MySQL), and the scope of operation is different in that the tables can be local to a session and @@ -316,6 +316,11 @@ operation, such as an autoincremented primary key, a Python side default, or a server-side default "eagerly" fetched via RETURNING. + .. change:: + :tags: feature, oracle + + Added support for the Oracle table option ON COMMIT. + .. change:: :tags: feature, postgresql :tickets: 2051 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 246eb9a14..de9e9a64c 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -955,11 +955,11 @@ when using ODBC to avoid this issue entirely. .. 
_change_3204: -SQLite has distinct methods for temporary table/view name reporting -------------------------------------------------------------------- +SQLite/Oracle have distinct methods for temporary table/view name reporting +--------------------------------------------------------------------------- The :meth:`.Inspector.get_table_names` and :meth:`.Inspector.get_view_names` -methods in the case of SQLite would also return the names of temporary +methods in the case of SQLite/Oracle would also return the names of temporary tables and views, which is not provided by any other dialect (in the case of MySQL at least it is not even possible). This logic has been moved out to two new methods :meth:`.Inspector.get_temp_table_names` and diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 81a9f1a95..837a498fb 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -213,6 +213,21 @@ is reflected and the type is reported as ``DATE``, the time-supporting examining the type of column for use in special Python translations or for migrating schemas to other database backends. +Oracle Table Options +------------------------- + +The CREATE TABLE phrase supports the following options with Oracle +in conjunction with the :class:`.Table` construct: + + +* ``ON COMMIT``:: + + Table( + "some_table", metadata, ..., + prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS') + +.. 
versionadded:: 1.0.0 + """ import re @@ -784,6 +799,16 @@ class OracleDDLCompiler(compiler.DDLCompiler): return super(OracleDDLCompiler, self).\ visit_create_index(create, include_schema=True) + def post_create_table(self, table): + table_opts = [] + opts = table.dialect_options['oracle'] + + if opts['on_commit']: + on_commit_options = opts['on_commit'].replace("_", " ").upper() + table_opts.append('\n ON COMMIT %s' % on_commit_options) + + return ''.join(table_opts) + class OracleIdentifierPreparer(compiler.IdentifierPreparer): @@ -842,7 +867,10 @@ class OracleDialect(default.DefaultDialect): reflection_options = ('oracle_resolve_synonyms', ) construct_arguments = [ - (sa_schema.Table, {"resolve_synonyms": False}) + (sa_schema.Table, { + "resolve_synonyms": False, + "on_commit": None + }) ] def __init__(self, @@ -1029,7 +1057,21 @@ class OracleDialect(default.DefaultDialect): "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " "('SYSTEM', 'SYSAUX') " "AND OWNER = :owner " - "AND IOT_NAME IS NULL") + "AND IOT_NAME IS NULL " + "AND DURATION IS NULL") + cursor = connection.execute(s, owner=schema) + return [self.normalize_name(row[0]) for row in cursor] + + @reflection.cache + def get_temp_table_names(self, connection, **kw): + schema = self.denormalize_name(self.default_schema_name) + s = sql.text( + "SELECT table_name FROM all_tables " + "WHERE nvl(tablespace_name, 'no tablespace') NOT IN " + "('SYSTEM', 'SYSAUX') " + "AND OWNER = :owner " + "AND IOT_NAME IS NULL " + "AND DURATION IS NOT NULL") cursor = connection.execute(s, owner=schema) return [self.normalize_name(row[0]) for row in cursor] diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index 690a880bb..60db9eb47 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -100,19 +100,31 @@ class ComponentReflectionTest(fixtures.TablesTest): @classmethod def define_temp_tables(cls, 
metadata): - temp_table = Table( + # cheat a bit, we should fix this with some dialect-level + # temp table fixture + if testing.against("oracle"): + kw = { + 'prefixes': ["GLOBAL TEMPORARY"], + 'oracle_on_commit': 'PRESERVE ROWS' + } + else: + kw = { + 'prefixes': ["TEMPORARY"], + } + + user_tmp = Table( "user_tmp", metadata, Column("id", sa.INT, primary_key=True), Column('name', sa.VARCHAR(50)), Column('foo', sa.INT), sa.UniqueConstraint('name', name='user_tmp_uq'), sa.Index("user_tmp_ix", "foo"), - prefixes=['TEMPORARY'] + **kw ) if testing.requires.view_reflection.enabled and \ testing.requires.temporary_views.enabled: event.listen( - temp_table, "after_create", + user_tmp, "after_create", DDL("create temporary view user_tmp_v as " "select * from user_tmp") ) @@ -186,7 +198,7 @@ class ComponentReflectionTest(fixtures.TablesTest): @testing.requires.temp_table_names def test_get_temp_table_names(self): - insp = inspect(self.metadata.bind) + insp = inspect(testing.db) temp_table_names = insp.get_temp_table_names() eq_(sorted(temp_table_names), ['user_tmp']) @@ -485,6 +497,7 @@ class ComponentReflectionTest(fixtures.TablesTest): self._test_get_unique_constraints() @testing.requires.temp_table_reflection + @testing.requires.unique_constraint_reflection def test_get_temp_table_unique_constraints(self): insp = inspect(self.metadata.bind) eq_( @@ -503,7 +516,6 @@ class ComponentReflectionTest(fixtures.TablesTest): [{'unique': False, 'column_names': ['foo'], 'name': 'user_tmp_ix'}] ) - @testing.requires.unique_constraint_reflection @testing.requires.schemas def test_get_unique_constraints_with_schema(self): diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py index 187042036..36eacf864 100644 --- a/test/dialect/test_oracle.py +++ b/test/dialect/test_oracle.py @@ -648,6 +648,23 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "CREATE INDEX bar ON foo (x > 5)" ) + def test_table_options(self): + m = MetaData() + + t = Table( + 'foo', m, + 
Column('x', Integer), + prefixes=["GLOBAL TEMPORARY"], + oracle_on_commit="PRESERVE ROWS" + ) + + self.assert_compile( + schema.CreateTable(t), + "CREATE GLOBAL TEMPORARY TABLE " + "foo (x INTEGER) ON COMMIT PRESERVE ROWS" + ) + + class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL): def _dialect(self, server_version, **kw): diff --git a/test/requirements.py b/test/requirements.py index cfdfc8054..80bd135e9 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -300,7 +300,7 @@ class DefaultRequirements(SuiteRequirements): def temp_table_names(self): """target dialect supports listing of temporary table names""" - return only_on(['sqlite']) + return only_on(['sqlite', 'oracle']) @property def temporary_views(self): -- cgit v1.2.1 From f82f6d55dc05daf2ba0881ded98f5715b70ae3e3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Sep 2014 11:44:48 -0400 Subject: - Added new method :meth:`.Select.with_statement_hint` and ORM method :meth:`.Query.with_statement_hint` to support statement-level hints that are not specific to a table. fixes #3206 --- doc/build/changelog/changelog_10.rst | 8 ++++++++ lib/sqlalchemy/orm/query.py | 29 +++++++++++++++++++++++++++-- lib/sqlalchemy/sql/compiler.py | 12 ++++++++++++ lib/sqlalchemy/sql/selectable.py | 36 ++++++++++++++++++++++++++++++++---- test/orm/test_query.py | 22 ++++++++++++++++++++++ test/sql/test_compiler.py | 17 +++++++++++++++++ 6 files changed, 118 insertions(+), 6 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index d6782c917..7d7548e11 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,14 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. 
change::
+        :tags: feature, sql
+        :tickets: 3206
+
+        Added new method :meth:`.Select.with_statement_hint` and ORM
+        method :meth:`.Query.with_statement_hint` to support statement-level
+        hints that are not specific to a table.
+
     .. change::
         :tags: bug, sqlite
         :tickets: 3203
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 60948293b..e6b2bf537 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1145,7 +1145,8 @@ class Query(object):
 
     @_generative()
     def with_hint(self, selectable, text, dialect_name='*'):
-        """Add an indexing hint for the given entity or selectable to
+        """Add an indexing or other executional context
+        hint for the given entity or selectable to
         this :class:`.Query`.
 
         Functionality is passed straight through to
@@ -1153,11 +1154,35 @@ class Query(object):
         with the addition that ``selectable`` can be a
         :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class
         /etc.
+
+        .. seealso::
+
+            :meth:`.Query.with_statement_hint`
+
         """
-        selectable = inspect(selectable).selectable
+        if selectable is not None:
+            selectable = inspect(selectable).selectable
 
         self._with_hints += ((selectable, text, dialect_name),)
 
+    def with_statement_hint(self, text, dialect_name='*'):
+        """add a statement hint to this :class:`.Query`.
+
+        This method is similar to :meth:`.Query.with_hint` except that
+        it does not require an individual table, and instead applies to the
+        statement as a whole.
+
+        This feature calls down into :meth:`.Select.with_statement_hint`.
+
+        .. versionadded:: 1.0.0
+
+        .. seealso::
+
+            :meth:`.Query.with_hint`
+
+        """
+        return self.with_hint(None, text, dialect_name)
+
     @_generative()
     def execution_options(self, **kwargs):
         """ Set non-SQL options which take effect during execution. 
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 5149fa4fe..abda31358 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1331,6 +1331,9 @@ class SQLCompiler(Compiled): def get_crud_hint_text(self, table, text): return None + def get_statement_hint_text(self, hint_texts): + return " ".join(hint_texts) + def _transform_select_for_nested_joins(self, select): """Rewrite any "a JOIN (b JOIN c)" expression as "a JOIN (select * from b JOIN c) AS anon", to support @@ -1609,6 +1612,15 @@ class SQLCompiler(Compiled): if select._for_update_arg is not None: text += self.for_update_clause(select, **kwargs) + if select._statement_hints: + per_dialect = [ + ht for (dialect_name, ht) + in select._statement_hints + if dialect_name in ('*', self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + if self.ctes and \ compound_index == 0 and toplevel: text = self._render_cte_clause() + text diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 9e8cb3bc5..248048662 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -2153,6 +2153,7 @@ class Select(HasPrefixes, GenerativeSelect): _prefixes = () _hints = util.immutabledict() + _statement_hints = () _distinct = False _from_cloned = None _correlate = () @@ -2525,10 +2526,30 @@ class Select(HasPrefixes, GenerativeSelect): return self._get_display_froms() + def with_statement_hint(self, text, dialect_name='*'): + """add a statement hint to this :class:`.Select`. + + This method is similar to :meth:`.Select.with_hint` except that + it does not require an individual table, and instead applies to the + statement as a whole. + + Hints here are specific to the backend database and may include + directives such as isolation levels, file directives, fetch directives, + etc. + + .. versionadded:: 1.0.0 + + .. 
seealso:: + + :meth:`.Select.with_hint` + + """ + return self.with_hint(None, text, dialect_name) + @_generative def with_hint(self, selectable, text, dialect_name='*'): - """Add an indexing hint for the given selectable to this - :class:`.Select`. + """Add an indexing or other executional context hint for the given + selectable to this :class:`.Select`. The text of the hint is rendered in the appropriate location for the database backend in use, relative @@ -2555,9 +2576,16 @@ class Select(HasPrefixes, GenerativeSelect): mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\ with_hint(mytable, "WITH INDEX ix_mytable", 'sybase') + .. seealso:: + + :meth:`.Select.with_statement_hint` + """ - self._hints = self._hints.union( - {(selectable, dialect_name): text}) + if selectable is None: + self._statement_hints += ((dialect_name, text), ) + else: + self._hints = self._hints.union( + {(selectable, dialect_name): text}) @property def type(self): diff --git a/test/orm/test_query.py b/test/orm/test_query.py index c9f0a5db0..52e266a08 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -2517,6 +2517,28 @@ class HintsTest(QueryTest, AssertsCompiledSQL): "ON users_1.id > users.id", dialect=dialect ) + def test_statement_hints(self): + User = self.classes.User + + sess = create_session() + stmt = sess.query(User).\ + with_statement_hint("test hint one").\ + with_statement_hint("test hint two").\ + with_statement_hint("test hint three", "postgresql") + + self.assert_compile( + stmt, + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users test hint one test hint two", + ) + + self.assert_compile( + stmt, + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users test hint one test hint two test hint three", + dialect='postgresql' + ) + class TextTest(QueryTest, AssertsCompiledSQL): __dialect__ = 'default' diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index d47b58f1f..3e6b87351 100644 --- 
a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -2420,6 +2420,23 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect=dialect ) + def test_statement_hints(self): + + stmt = select([table1.c.myid]).\ + with_statement_hint("test hint one").\ + with_statement_hint("test hint two", 'mysql') + + self.assert_compile( + stmt, + "SELECT mytable.myid FROM mytable test hint one", + ) + + self.assert_compile( + stmt, + "SELECT mytable.myid FROM mytable test hint one test hint two", + dialect='mysql' + ) + def test_literal_as_text_fromstring(self): self.assert_compile( and_(text("a"), text("b")), -- cgit v1.2.1 From 9ae4db27b993fbd4666907cd11c2de3a41aee02f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Sep 2014 15:24:40 -0400 Subject: - Fixed bug that affected many classes of event, particularly ORM events but also engine events, where the usual logic of "de duplicating" a redundant call to :func:`.event.listen` with the same arguments would fail, for those events where the listener function is wrapped. An assertion would be hit within registry.py. This assertion has now been integrated into the deduplication check, with the added bonus of a simpler means of checking deduplication across the board. fixes #3199 --- doc/build/changelog/changelog_09.rst | 14 +++++++++ lib/sqlalchemy/event/attr.py | 6 ++-- lib/sqlalchemy/event/registry.py | 26 +++++++++------- test/base/test_events.py | 59 ++++++++++++++++++++++++++++++++++++ 4 files changed, 90 insertions(+), 15 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 91e66bded..56be5b38e 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,20 @@ .. changelog:: :version: 0.9.8 + .. 
change:: + :tags: bug, orm + :versions: 1.0.0 + :tickets: 3199 + + Fixed bug that affected many classes of event, particularly + ORM events but also engine events, where the usual logic of + "de duplicating" a redundant call to :func:`.event.listen` + with the same arguments would fail, for those events where the + listener function is wrapped. An assertion would be hit within + registry.py. This assertion has now been integrated into the + deduplication check, with the added bonus of a simpler means + of checking deduplication across the board. + .. change:: :tags: bug, mssql :versions: 1.0.0 diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py index dba1063cf..be2a82208 100644 --- a/lib/sqlalchemy/event/attr.py +++ b/lib/sqlalchemy/event/attr.py @@ -319,14 +319,12 @@ class _ListenerCollection(RefCollection, _CompoundListener): registry._stored_in_collection_multi(self, other, to_associate) def insert(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.prepend_to_list(self, self.listeners) + if event_key.prepend_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) def append(self, event_key, propagate): - if event_key._listen_fn not in self.listeners: - event_key.append_to_list(self, self.listeners) + if event_key.append_to_list(self, self.listeners): if propagate: self.propagate.add(event_key._listen_fn) diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index ba2f671a3..217cf7d44 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -71,13 +71,15 @@ def _stored_in_collection(event_key, owner): listen_ref = weakref.ref(event_key._listen_fn) if owner_ref in dispatch_reg: - assert dispatch_reg[owner_ref] == listen_ref - else: - dispatch_reg[owner_ref] = listen_ref + return False + + dispatch_reg[owner_ref] = listen_ref listener_to_key = _collection_to_key[owner_ref] listener_to_key[listen_ref] = key + return True 
+ def _removed_from_collection(event_key, owner): key = event_key._key @@ -229,18 +231,20 @@ class _EventKey(object): def _listen_fn(self): return self.fn_wrap or self.fn - def append_value_to_list(self, owner, list_, value): - _stored_in_collection(self, owner) - list_.append(value) - def append_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.append(self._listen_fn) + if _stored_in_collection(self, owner): + list_.append(self._listen_fn) + return True + else: + return False def remove_from_list(self, owner, list_): _removed_from_collection(self, owner) list_.remove(self._listen_fn) def prepend_to_list(self, owner, list_): - _stored_in_collection(self, owner) - list_.appendleft(self._listen_fn) + if _stored_in_collection(self, owner): + list_.appendleft(self._listen_fn) + return True + else: + return False diff --git a/test/base/test_events.py b/test/base/test_events.py index 30b728cd3..913e1d3f5 100644 --- a/test/base/test_events.py +++ b/test/base/test_events.py @@ -996,6 +996,25 @@ class RemovalTest(fixtures.TestBase): dispatch = event.dispatcher(TargetEvents) return Target + def _wrapped_fixture(self): + class TargetEvents(event.Events): + @classmethod + def _listen(cls, event_key): + fn = event_key.fn + + def adapt(value): + fn("adapted " + value) + event_key = event_key.with_wrapper(adapt) + + event_key.base_listen() + + def event_one(self, value): + pass + + class Target(object): + dispatch = event.dispatcher(TargetEvents) + return Target + def test_clslevel(self): Target = self._fixture() @@ -1194,3 +1213,43 @@ class RemovalTest(fixtures.TestBase): "deque mutated during iteration", t1.dispatch.event_one ) + + def test_double_event_nonwrapped(self): + Target = self._fixture() + + listen_one = Mock() + t1 = Target() + event.listen(t1, "event_one", listen_one) + event.listen(t1, "event_one", listen_one) + + t1.dispatch.event_one("t1") + + # doubles are eliminated + eq_(listen_one.mock_calls, [call("t1")]) + + # only one remove needed 
+ event.remove(t1, "event_one", listen_one) + t1.dispatch.event_one("t2") + + eq_(listen_one.mock_calls, [call("t1")]) + + def test_double_event_wrapped(self): + # this is issue #3199 + Target = self._wrapped_fixture() + + listen_one = Mock() + t1 = Target() + + event.listen(t1, "event_one", listen_one) + event.listen(t1, "event_one", listen_one) + + t1.dispatch.event_one("t1") + + # doubles are eliminated + eq_(listen_one.mock_calls, [call("adapted t1")]) + + # only one remove needed + event.remove(t1, "event_one", listen_one) + t1.dispatch.event_one("t2") + + eq_(listen_one.mock_calls, [call("adapted t1")]) -- cgit v1.2.1 From c7ec21b29e926c40dd64eb2909d5f8b5e120ed94 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Sep 2014 15:42:27 -0400 Subject: - Fixed an unlikely race condition observed in some exotic end-user setups, where the attempt to check for "duplicate class name" in declarative would hit upon a not-totally-cleaned-up weak reference related to some other class being removed; the check here now ensures the weakref still references an object before calling upon it further. fixes #3208 --- doc/build/changelog/changelog_09.rst | 11 +++++++++++ lib/sqlalchemy/ext/declarative/clsregistry.py | 7 ++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 56be5b38e..c0d13e16d 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,17 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, declarative + :versions: 1.0.0 + :tickets: 3208 + + Fixed an unlikely race condition observed in some exotic end-user + setups, where the attempt to check for "duplicate class name" in + declarative would hit upon a not-totally-cleaned-up weak reference + related to some other class being removed; the check here now ensures + the weakref still references an object before calling upon it further. + .. 
change:: :tags: bug, orm :versions: 1.0.0 diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py index 4595b857a..3ef63a5ae 100644 --- a/lib/sqlalchemy/ext/declarative/clsregistry.py +++ b/lib/sqlalchemy/ext/declarative/clsregistry.py @@ -103,7 +103,12 @@ class _MultipleClassMarker(object): self.on_remove() def add_item(self, item): - modules = set([cls().__module__ for cls in self.contents]) + # protect against class registration race condition against + # asynchronous garbage collection calling _remove_item, + # [ticket:3208] + modules = set([ + cls.__module__ for cls in + [ref() for ref in self.contents] if cls is not None]) if item.__module__ in modules: util.warn( "This declarative base already contains a class with the " -- cgit v1.2.1 From fe5af837dc1b3ae244d6817340155aa07c2fb850 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Sep 2014 16:28:54 -0400 Subject: fix test --- test/orm/test_query.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 52e266a08..f14ad7864 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -2482,6 +2482,8 @@ class YieldTest(_fixtures.FixtureTest): class HintsTest(QueryTest, AssertsCompiledSQL): + __dialect__ = 'default' + def test_hints(self): User = self.classes.User -- cgit v1.2.1 From b36cdefba273b8df0bc0ddf3ad072ba6031712ab Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 18 Sep 2014 17:49:07 -0400 Subject: - Fixed bug that affected generally the same classes of event as that of :ticket:`3199`, when the ``named=True`` parameter would be used. Some events would fail to register, and others would not invoke the event arguments correctly, generally in the case of when an event was "wrapped" for adaption in some other way. The "named" mechanics have been rearranged to not interfere with the argument signature expected by internal wrapper functions. 
fixes #3197 --- doc/build/changelog/changelog_09.rst | 13 +++++ lib/sqlalchemy/event/registry.py | 14 ++++- lib/sqlalchemy/events.py | 3 +- lib/sqlalchemy/orm/events.py | 12 ++-- test/base/test_events.py | 110 ++++++++++++++++++++++++++++++----- test/engine/test_execute.py | 42 +++++++++++++ test/orm/test_attributes.py | 50 +++++++++++++++- test/orm/test_events.py | 18 +++--- 8 files changed, 231 insertions(+), 31 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index c0d13e16d..7c75996a4 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,19 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, orm, engine + :versions: 1.0.0 + :tickets: 3197 + + Fixed bug that affected generally the same classes of event + as that of :ticket:`3199`, when the ``named=True`` parameter + would be used. Some events would fail to register, and others + would not invoke the event arguments correctly, generally in the + case of when an event was "wrapped" for adaption in some other way. + The "named" mechanics have been rearranged to not interfere with + the argument signature expected by internal wrapper functions. + .. 
change:: :tags: bug, declarative :versions: 1.0.0 diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py index 217cf7d44..5b422c401 100644 --- a/lib/sqlalchemy/event/registry.py +++ b/lib/sqlalchemy/event/registry.py @@ -182,6 +182,17 @@ class _EventKey(object): def listen(self, *args, **kw): once = kw.pop("once", False) + named = kw.pop("named", False) + + target, identifier, fn = \ + self.dispatch_target, self.identifier, self._listen_fn + + dispatch_descriptor = getattr(target.dispatch, identifier) + + adjusted_fn = dispatch_descriptor._adjust_fn_spec(fn, named) + + self = self.with_wrapper(adjusted_fn) + if once: self.with_wrapper( util.only_once(self._listen_fn)).listen(*args, **kw) @@ -217,9 +228,6 @@ class _EventKey(object): dispatch_descriptor = getattr(target.dispatch, identifier) - fn = dispatch_descriptor._adjust_fn_spec(fn, named) - self = self.with_wrapper(fn) - if insert: dispatch_descriptor.\ for_modify(target.dispatch).insert(self, propagate) diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 1ecec51b6..1ff35b8b0 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -470,7 +470,8 @@ class ConnectionEvents(event.Events): @classmethod def _listen(cls, event_key, retval=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn target._has_events = True diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index c50a7b062..9ea0dd834 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -61,7 +61,8 @@ class InstrumentationEvents(event.Events): @classmethod def _listen(cls, event_key, propagate=True, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn def listen(target_cls, *arg): listen_cls = target() @@ -192,7 
+193,8 @@ class InstanceEvents(event.Events): @classmethod def _listen(cls, event_key, raw=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if not raw: def wrap(state, *arg, **kw): @@ -498,7 +500,8 @@ class MapperEvents(event.Events): def _listen( cls, event_key, raw=False, retval=False, propagate=False, **kw): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if identifier in ("before_configured", "after_configured") and \ target is not mapperlib.Mapper: @@ -1493,7 +1496,8 @@ class AttributeEvents(event.Events): propagate=False): target, identifier, fn = \ - event_key.dispatch_target, event_key.identifier, event_key.fn + event_key.dispatch_target, event_key.identifier, \ + event_key._listen_fn if active_history: target.dispatch._active_history = True diff --git a/test/base/test_events.py b/test/base/test_events.py index 913e1d3f5..89379961e 100644 --- a/test/base/test_events.py +++ b/test/base/test_events.py @@ -192,7 +192,7 @@ class EventsTest(fixtures.TestBase): class NamedCallTest(fixtures.TestBase): - def setUp(self): + def _fixture(self): class TargetEventsOne(event.Events): def event_one(self, x, y): pass @@ -205,48 +205,104 @@ class NamedCallTest(fixtures.TestBase): class TargetOne(object): dispatch = event.dispatcher(TargetEventsOne) - self.TargetOne = TargetOne + return TargetOne - def tearDown(self): - event.base._remove_dispatcher( - self.TargetOne.__dict__['dispatch'].events) + def _wrapped_fixture(self): + class TargetEvents(event.Events): + @classmethod + def _listen(cls, event_key): + fn = event_key._listen_fn + + def adapt(*args): + fn(*["adapted %s" % arg for arg in args]) + event_key = event_key.with_wrapper(adapt) + + event_key.base_listen() + + def event_one(self, x, y): + pass + + 
def event_five(self, x, y, z, q): + pass + + class Target(object): + dispatch = event.dispatcher(TargetEvents) + return Target def test_kw_accept(self): + TargetOne = self._fixture() + canary = Mock() - @event.listens_for(self.TargetOne, "event_one", named=True) + @event.listens_for(TargetOne, "event_one", named=True) def handler1(**kw): canary(kw) - self.TargetOne().dispatch.event_one(4, 5) + TargetOne().dispatch.event_one(4, 5) eq_( canary.mock_calls, [call({"x": 4, "y": 5})] ) + def test_kw_accept_wrapped(self): + TargetOne = self._wrapped_fixture() + + canary = Mock() + + @event.listens_for(TargetOne, "event_one", named=True) + def handler1(**kw): + canary(kw) + + TargetOne().dispatch.event_one(4, 5) + + eq_( + canary.mock_calls, + [call({'y': 'adapted 5', 'x': 'adapted 4'})] + ) + def test_partial_kw_accept(self): + TargetOne = self._fixture() + canary = Mock() - @event.listens_for(self.TargetOne, "event_five", named=True) + @event.listens_for(TargetOne, "event_five", named=True) def handler1(z, y, **kw): canary(z, y, kw) - self.TargetOne().dispatch.event_five(4, 5, 6, 7) + TargetOne().dispatch.event_five(4, 5, 6, 7) eq_( canary.mock_calls, [call(6, 5, {"x": 4, "q": 7})] ) + def test_partial_kw_accept_wrapped(self): + TargetOne = self._wrapped_fixture() + + canary = Mock() + + @event.listens_for(TargetOne, "event_five", named=True) + def handler1(z, y, **kw): + canary(z, y, kw) + + TargetOne().dispatch.event_five(4, 5, 6, 7) + + eq_( + canary.mock_calls, + [call('adapted 6', 'adapted 5', + {'q': 'adapted 7', 'x': 'adapted 4'})] + ) + def test_kw_accept_plus_kw(self): + TargetOne = self._fixture() canary = Mock() - @event.listens_for(self.TargetOne, "event_two", named=True) + @event.listens_for(TargetOne, "event_two", named=True) def handler1(**kw): canary(kw) - self.TargetOne().dispatch.event_two(4, 5, z=8, q=5) + TargetOne().dispatch.event_two(4, 5, z=8, q=5) eq_( canary.mock_calls, @@ -1000,7 +1056,7 @@ class RemovalTest(fixtures.TestBase): class 
TargetEvents(event.Events): @classmethod def _listen(cls, event_key): - fn = event_key.fn + fn = event_key._listen_fn def adapt(value): fn("adapted " + value) @@ -1008,7 +1064,7 @@ class RemovalTest(fixtures.TestBase): event_key.base_listen() - def event_one(self, value): + def event_one(self, x): pass class Target(object): @@ -1214,6 +1270,34 @@ class RemovalTest(fixtures.TestBase): t1.dispatch.event_one ) + def test_remove_plain_named(self): + Target = self._fixture() + + listen_one = Mock() + t1 = Target() + event.listen(t1, "event_one", listen_one, named=True) + t1.dispatch.event_one("t1") + + eq_(listen_one.mock_calls, [call(x="t1")]) + event.remove(t1, "event_one", listen_one) + t1.dispatch.event_one("t2") + + eq_(listen_one.mock_calls, [call(x="t1")]) + + def test_remove_wrapped_named(self): + Target = self._wrapped_fixture() + + listen_one = Mock() + t1 = Target() + event.listen(t1, "event_one", listen_one, named=True) + t1.dispatch.event_one("t1") + + eq_(listen_one.mock_calls, [call(x="adapted t1")]) + event.remove(t1, "event_one", listen_one) + t1.dispatch.event_one("t2") + + eq_(listen_one.mock_calls, [call(x="adapted t1")]) + def test_double_event_nonwrapped(self): Target = self._fixture() diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index d8e1c655e..e14a4fd2a 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -1440,6 +1440,48 @@ class EngineEventsTest(fixtures.TestBase): 'begin', 'execute', 'cursor_execute', 'commit', ]) + def test_transactional_named(self): + canary = [] + + def tracker(name): + def go(*args, **kw): + canary.append((name, set(kw))) + return go + + engine = engines.testing_engine() + event.listen(engine, 'before_execute', tracker('execute'), named=True) + event.listen( + engine, 'before_cursor_execute', + tracker('cursor_execute'), named=True) + event.listen(engine, 'begin', tracker('begin'), named=True) + event.listen(engine, 'commit', tracker('commit'), named=True) + 
event.listen(engine, 'rollback', tracker('rollback'), named=True) + + conn = engine.connect() + trans = conn.begin() + conn.execute(select([1])) + trans.rollback() + trans = conn.begin() + conn.execute(select([1])) + trans.commit() + + eq_( + canary, [ + ('begin', set(['conn', ])), + ('execute', set([ + 'conn', 'clauseelement', 'multiparams', 'params'])), + ('cursor_execute', set([ + 'conn', 'cursor', 'executemany', + 'statement', 'parameters', 'context'])), + ('rollback', set(['conn', ])), ('begin', set(['conn', ])), + ('execute', set([ + 'conn', 'clauseelement', 'multiparams', 'params'])), + ('cursor_execute', set([ + 'conn', 'cursor', 'executemany', 'statement', + 'parameters', 'context'])), + ('commit', set(['conn', ]))] + ) + @testing.requires.savepoints @testing.requires.two_phase_transactions def test_transactional_advanced(self): diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index 46d5f86e5..9c1f7a985 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -2522,6 +2522,53 @@ class ListenerTest(fixtures.ORMTest): f1.barset.add(b1) assert f1.barset.pop().data == 'some bar appended' + def test_named(self): + canary = Mock() + + class Foo(object): + pass + + class Bar(object): + pass + + instrumentation.register_class(Foo) + instrumentation.register_class(Bar) + attributes.register_attribute( + Foo, 'data', uselist=False, + useobject=False) + attributes.register_attribute( + Foo, 'barlist', uselist=True, + useobject=True) + + event.listen(Foo.data, 'set', canary.set, named=True) + event.listen(Foo.barlist, 'append', canary.append, named=True) + event.listen(Foo.barlist, 'remove', canary.remove, named=True) + + f1 = Foo() + b1 = Bar() + f1.data = 5 + f1.barlist.append(b1) + f1.barlist.remove(b1) + eq_( + canary.mock_calls, + [ + call.set( + oldvalue=attributes.NO_VALUE, + initiator=attributes.Event( + Foo.data.impl, attributes.OP_REPLACE), + target=f1, value=5), + call.append( + initiator=attributes.Event( + 
Foo.barlist.impl, attributes.OP_APPEND), + target=f1, + value=b1), + call.remove( + initiator=attributes.Event( + Foo.barlist.impl, attributes.OP_REMOVE), + target=f1, + value=b1)] + ) + def test_collection_link_events(self): class Foo(object): pass @@ -2559,9 +2606,6 @@ class ListenerTest(fixtures.ORMTest): ) - - - def test_none_on_collection_event(self): """test that append/remove of None in collections emits events. diff --git a/test/orm/test_events.py b/test/orm/test_events.py index e6efd6fb9..904293102 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -112,6 +112,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): mapper(User, users) canary = self.listen_all(User) + named_canary = self.listen_all(User, named=True) sess = create_session() u = User(name='u1') @@ -125,13 +126,15 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() sess.delete(u) sess.flush() - eq_(canary, - ['init', 'before_insert', - 'after_insert', 'expire', - 'refresh', - 'load', - 'before_update', 'after_update', 'before_delete', - 'after_delete']) + expected = [ + 'init', 'before_insert', + 'after_insert', 'expire', + 'refresh', + 'load', + 'before_update', 'after_update', 'before_delete', + 'after_delete'] + eq_(canary, expected) + eq_(named_canary, expected) def test_insert_before_configured(self): users, User = self.tables.users, self.classes.User @@ -1193,6 +1196,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): 'before_commit', 'after_commit','after_transaction_end'] ) + def test_rollback_hook(self): User, users = self.classes.User, self.tables.users sess, canary = self._listener_fixture() -- cgit v1.2.1 From f458ef84569c63ca1f98bfda3aef6f41119940e0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 19 Sep 2014 12:39:34 -0400 Subject: - these tests don't test anything in SQLAlchemy - from our perpsective, we need to be in transactions (tested elsewhere) and we need to emit the correct FOR UPDATE 
strings (tested elsewhere). There's nothing in SQLA to be tested as far as validating that for update causes exceptions or not, and these tests frequently fail as they are timing sensitive. --- test/engine/test_transaction.py | 133 ---------------------------------------- 1 file changed, 133 deletions(-) diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 8a5303642..d921e9ead 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -1126,139 +1126,6 @@ class TLTransactionTest(fixtures.TestBase): order_by(users.c.user_id)).fetchall(), [(1, ), (2, )]) -counters = None - - -class ForUpdateTest(fixtures.TestBase): - __requires__ = 'ad_hoc_engines', - __backend__ = True - - @classmethod - def setup_class(cls): - global counters, metadata - metadata = MetaData() - counters = Table('forupdate_counters', metadata, - Column('counter_id', INT, primary_key=True), - Column('counter_value', INT), - test_needs_acid=True) - counters.create(testing.db) - - def teardown(self): - testing.db.execute(counters.delete()).close() - - @classmethod - def teardown_class(cls): - counters.drop(testing.db) - - def increment(self, count, errors, update_style=True, delay=0.005): - con = testing.db.connect() - sel = counters.select(for_update=update_style, - whereclause=counters.c.counter_id == 1) - for i in range(count): - trans = con.begin() - try: - existing = con.execute(sel).first() - incr = existing['counter_value'] + 1 - time.sleep(delay) - con.execute(counters.update(counters.c.counter_id == 1, - values={'counter_value': incr})) - time.sleep(delay) - readback = con.execute(sel).first() - if readback['counter_value'] != incr: - raise AssertionError('Got %s post-update, expected ' - '%s' % (readback['counter_value'], incr)) - trans.commit() - except Exception as e: - trans.rollback() - errors.append(e) - break - con.close() - - @testing.crashes('mssql', 'FIXME: unknown') - @testing.crashes('firebird', 'FIXME: unknown') - 
@testing.crashes('sybase', 'FIXME: unknown') - @testing.requires.independent_connections - def test_queued_update(self): - """Test SELECT FOR UPDATE with concurrent modifications. - - Runs concurrent modifications on a single row in the users - table, with each mutator trying to increment a value stored in - user_name. - - """ - - db = testing.db - db.execute(counters.insert(), counter_id=1, counter_value=0) - iterations, thread_count = 10, 5 - threads, errors = [], [] - for i in range(thread_count): - thrd = threading.Thread(target=self.increment, - args=(iterations, ), - kwargs={'errors': errors, - 'update_style': True}) - thrd.start() - threads.append(thrd) - for thrd in threads: - thrd.join() - assert not errors - sel = counters.select(whereclause=counters.c.counter_id == 1) - final = db.execute(sel).first() - eq_(final['counter_value'], iterations * thread_count) - - def overlap(self, ids, errors, update_style): - - sel = counters.select(for_update=update_style, - whereclause=counters.c.counter_id.in_(ids)) - con = testing.db.connect() - trans = con.begin() - try: - rows = con.execute(sel).fetchall() - time.sleep(0.50) - trans.commit() - except Exception as e: - trans.rollback() - errors.append(e) - con.close() - - def _threaded_overlap(self, thread_count, groups, update_style=True, pool=5): - db = testing.db - for cid in range(pool - 1): - db.execute(counters.insert(), counter_id=cid + 1, - counter_value=0) - errors, threads = [], [] - for i in range(thread_count): - thrd = threading.Thread(target=self.overlap, - args=(groups.pop(0), errors, - update_style)) - time.sleep(0.20) # give the previous thread a chance to start - # to ensure it gets a lock - thrd.start() - threads.append(thrd) - for thrd in threads: - thrd.join() - return errors - - @testing.crashes('mssql', 'FIXME: unknown') - @testing.crashes('firebird', 'FIXME: unknown') - @testing.crashes('sybase', 'FIXME: unknown') - @testing.requires.independent_connections - def test_queued_select(self): - 
"""Simple SELECT FOR UPDATE conflict test""" - - errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)]) - assert not errors - - @testing.crashes('mssql', 'FIXME: unknown') - @testing.fails_on('mysql', 'No support for NOWAIT') - @testing.crashes('firebird', 'FIXME: unknown') - @testing.crashes('sybase', 'FIXME: unknown') - @testing.requires.independent_connections - def test_nowait_select(self): - """Simple SELECT FOR UPDATE NOWAIT conflict test""" - - errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)], - update_style='nowait') - assert errors class IsolationLevelTest(fixtures.TestBase): __requires__ = ('isolation_level', 'ad_hoc_engines') -- cgit v1.2.1 From 1dacbb25f86f828ab5df6b6424f4eb7f402a356e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 22 Sep 2014 22:22:09 -0400 Subject: pep8/flake8 --- test/ext/test_automap.py | 101 +++++++++++++++++++++++++++-------------------- 1 file changed, 59 insertions(+), 42 deletions(-) diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py index f24164cb7..6cfd0fbca 100644 --- a/test/ext/test_automap.py +++ b/test/ext/test_automap.py @@ -1,13 +1,14 @@ -from sqlalchemy.testing import fixtures, eq_ +from sqlalchemy.testing import fixtures from ..orm._fixtures import FixtureTest from sqlalchemy.ext.automap import automap_base -from sqlalchemy.orm import relationship, interfaces, backref +from sqlalchemy.orm import relationship, interfaces from sqlalchemy.ext.automap import generate_relationship -from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.mock import Mock from sqlalchemy import String, Integer, ForeignKey from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column + class AutomapTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): @@ -27,6 +28,7 @@ class AutomapTest(fixtures.MappedTest): def test_relationship_explicit_override_o2m(self): Base = automap_base(metadata=self.metadata) prop = relationship("addresses", collection_class=set) 
+ class User(Base): __tablename__ = 'users' @@ -44,6 +46,7 @@ class AutomapTest(fixtures.MappedTest): Base = automap_base(metadata=self.metadata) prop = relationship("users") + class Address(Base): __tablename__ = 'addresses' @@ -57,7 +60,6 @@ class AutomapTest(fixtures.MappedTest): u1 = User(name='u1', address_collection=[a1]) assert a1.users is u1 - def test_relationship_self_referential(self): Base = automap_base(metadata=self.metadata) Base.prepare() @@ -75,17 +77,19 @@ class AutomapTest(fixtures.MappedTest): def classname_for_table(base, tablename, table): return str("cls_" + tablename) - def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): + def name_for_scalar_relationship( + base, local_cls, referred_cls, constraint): return "scalar_" + referred_cls.__name__ - def name_for_collection_relationship(base, local_cls, referred_cls, constraint): + def name_for_collection_relationship( + base, local_cls, referred_cls, constraint): return "coll_" + referred_cls.__name__ Base.prepare( - classname_for_table=classname_for_table, - name_for_scalar_relationship=name_for_scalar_relationship, - name_for_collection_relationship=name_for_collection_relationship - ) + classname_for_table=classname_for_table, + name_for_scalar_relationship=name_for_scalar_relationship, + name_for_collection_relationship=name_for_collection_relationship + ) User = Base.classes.cls_users Address = Base.classes.cls_addresses @@ -113,9 +117,10 @@ class AutomapTest(fixtures.MappedTest): class Order(Base): __tablename__ = 'orders' - items_collection = relationship("items", - secondary="order_items", - collection_class=set) + items_collection = relationship( + "items", + secondary="order_items", + collection_class=set) Base.prepare() Item = Base.classes['items'] @@ -133,41 +138,49 @@ class AutomapTest(fixtures.MappedTest): Base = automap_base(metadata=self.metadata) mock = Mock() - def _gen_relationship(base, direction, return_fn, attrname, - local_cls, referred_cls, **kw): + 
+ def _gen_relationship( + base, direction, return_fn, attrname, + local_cls, referred_cls, **kw): mock(base, direction, attrname) - return generate_relationship(base, direction, return_fn, - attrname, local_cls, referred_cls, **kw) + return generate_relationship( + base, direction, return_fn, + attrname, local_cls, referred_cls, **kw) Base.prepare(generate_relationship=_gen_relationship) assert set(tuple(c[1]) for c in mock.mock_calls).issuperset([ - (Base, interfaces.MANYTOONE, "nodes"), - (Base, interfaces.MANYTOMANY, "keywords_collection"), - (Base, interfaces.MANYTOMANY, "items_collection"), - (Base, interfaces.MANYTOONE, "users"), - (Base, interfaces.ONETOMANY, "addresses_collection"), + (Base, interfaces.MANYTOONE, "nodes"), + (Base, interfaces.MANYTOMANY, "keywords_collection"), + (Base, interfaces.MANYTOMANY, "items_collection"), + (Base, interfaces.MANYTOONE, "users"), + (Base, interfaces.ONETOMANY, "addresses_collection"), ]) class AutomapInhTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - Table('single', metadata, - Column('id', Integer, primary_key=True), - Column('type', String(10)), - test_needs_fk=True - ) - - Table('joined_base', metadata, - Column('id', Integer, primary_key=True), - Column('type', String(10)), - test_needs_fk=True - ) - - Table('joined_inh', metadata, - Column('id', Integer, ForeignKey('joined_base.id'), primary_key=True), - test_needs_fk=True - ) + Table( + 'single', metadata, + Column('id', Integer, primary_key=True), + Column('type', String(10)), + test_needs_fk=True + ) + + Table( + 'joined_base', metadata, + Column('id', Integer, primary_key=True), + Column('type', String(10)), + test_needs_fk=True + ) + + Table( + 'joined_inh', metadata, + Column( + 'id', Integer, + ForeignKey('joined_base.id'), primary_key=True), + test_needs_fk=True + ) FixtureTest.define_tables(metadata) @@ -179,7 +192,8 @@ class AutomapInhTest(fixtures.MappedTest): type = Column(String) - __mapper_args__ = 
{"polymorphic_identity": "u0", + __mapper_args__ = { + "polymorphic_identity": "u0", "polymorphic_on": type} class SubUser1(Single): @@ -200,14 +214,14 @@ class AutomapInhTest(fixtures.MappedTest): type = Column(String) - __mapper_args__ = {"polymorphic_identity": "u0", + __mapper_args__ = { + "polymorphic_identity": "u0", "polymorphic_on": type} class SubJoined(Joined): __tablename__ = 'joined_inh' __mapper_args__ = {"polymorphic_identity": "u1"} - Base.prepare(engine=testing.db, reflect=True) assert SubJoined.__mapper__.inherits is Joined.__mapper__ @@ -217,6 +231,9 @@ class AutomapInhTest(fixtures.MappedTest): def test_conditional_relationship(self): Base = automap_base() + def _gen_relationship(*arg, **kw): return None - Base.prepare(engine=testing.db, reflect=True, generate_relationship=_gen_relationship) + Base.prepare( + engine=testing.db, reflect=True, + generate_relationship=_gen_relationship) -- cgit v1.2.1 From 5508388f0325ea75d311a2ef7ee4cbd6b1b8f354 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 22 Sep 2014 23:00:45 -0400 Subject: - The :mod:`sqlalchemy.ext.automap` extension will now set ``cascade="all, delete-orphan"`` automatically on a one-to-many relationship/backref where the foreign key is detected as containing one or more non-nullable columns. This argument is present in the keywords passed to :func:`.automap.generate_relationship` in this case and can still be overridden. Additionally, if the :class:`.ForeignKeyConstraint` specifies ``ondelete="CASCADE"`` for a non-nullable or ``ondelete="SET NULL"`` for a nullable set of columns, the argument ``passive_deletes=True`` is also added to the relationship. Note that not all backends support reflection of ondelete, but backends that do include Postgresql and MySQL. 
fixes #3210 --- doc/build/changelog/changelog_10.rst | 16 +++++++++ lib/sqlalchemy/ext/automap.py | 46 ++++++++++++++++++++---- test/ext/test_automap.py | 68 +++++++++++++++++++++++++++++++++++- 3 files changed, 123 insertions(+), 7 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 7d7548e11..88cae563f 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,22 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, ext + :tickets: 3210 + + The :mod:`sqlalchemy.ext.automap` extension will now set + ``cascade="all, delete-orphan"`` automatically on a one-to-many + relationship/backref where the foreign key is detected as containing + one or more non-nullable columns. This argument is present in the + keywords passed to :func:`.automap.generate_relationship` in this + case and can still be overridden. Additionally, if the + :class:`.ForeignKeyConstraint` specifies ``ondelete="CASCADE"`` + for a non-nullable or ``ondelete="SET NULL"`` for a nullable set + of columns, the argument ``passive_deletes=True`` is also added to the + relationship. Note that not all backends support reflection of + ondelete, but backends that do include Postgresql and MySQL. + .. change:: :tags: feature, sql :tickets: 3206 diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index 121285ab3..c11795d37 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -243,7 +243,26 @@ follows: one-to-many backref will be created on the referred class referring to this class. -4. The names of the relationships are determined using the +4. If any of the columns that are part of the :class:`.ForeignKeyConstraint` + are not nullable (e.g. 
``nullable=False``), a + :paramref:`~.relationship.cascade` keyword argument + of ``all, delete-orphan`` will be added to the keyword arguments to + be passed to the relationship or backref. If the + :class:`.ForeignKeyConstraint` reports that + :paramref:`.ForeignKeyConstraint.ondelete` + is set to ``CASCADE`` for a not null or ``SET NULL`` for a nullable + set of columns, the option :paramref:`~.relationship.passive_deletes` + flag is set to ``True`` in the set of relationship keyword arguments. + Note that not all backends support reflection of ON DELETE. + + .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key + constraints when producing a one-to-many relationship and establish + a default cascade of ``all, delete-orphan`` if so; additionally, + if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete` + of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns, + the ``passive_deletes=True`` option is also added. + +5. The names of the relationships are determined using the :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and :paramref:`.AutomapBase.prepare.name_for_collection_relationship` callable functions. It is important to note that the default relationship @@ -252,18 +271,18 @@ follows: alternate class naming scheme, that's the name from which the relationship name will be derived. -5. The classes are inspected for an existing mapped property matching these +6. The classes are inspected for an existing mapped property matching these names. If one is detected on one side, but none on the other side, :class:`.AutomapBase` attempts to create a relationship on the missing side, then uses the :paramref:`.relationship.back_populates` parameter in order to point the new relationship to the other side. -6. In the usual case where no relationship is on either side, +7. 
In the usual case where no relationship is on either side, :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one" side and matches it to the other using the :paramref:`.relationship.backref` parameter. -7. Production of the :func:`.relationship` and optionally the :func:`.backref` +8. Production of the :func:`.relationship` and optionally the :func:`.backref` is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship` function, which can be supplied by the end-user in order to augment the arguments passed to :func:`.relationship` or :func:`.backref` or to @@ -877,6 +896,19 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, constraint ) + o2m_kws = {} + nullable = False not in set([fk.parent.nullable for fk in fks]) + if not nullable: + o2m_kws['cascade'] = "all, delete-orphan" + + if constraint.ondelete and \ + constraint.ondelete.lower() == "cascade": + o2m_kws['passive_deletes'] = True + else: + if constraint.ondelete and \ + constraint.ondelete.lower() == "set null": + o2m_kws['passive_deletes'] = True + create_backref = backref_name not in referred_cfg.properties if relationship_name not in map_config.properties: @@ -885,7 +917,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, automap_base, interfaces.ONETOMANY, backref, backref_name, referred_cls, local_cls, - collection_class=collection_class) + collection_class=collection_class, + **o2m_kws) else: backref_obj = None rel = generate_relationship(automap_base, @@ -916,7 +949,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config, fk.parent for fk in constraint.elements], back_populates=relationship_name, - collection_class=collection_class) + collection_class=collection_class, + **o2m_kws) if rel is not None: referred_cfg.properties[backref_name] = rel map_config.properties[ diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py index 6cfd0fbca..0a57b9caa 100644 --- 
a/test/ext/test_automap.py +++ b/test/ext/test_automap.py @@ -1,7 +1,7 @@ from sqlalchemy.testing import fixtures from ..orm._fixtures import FixtureTest from sqlalchemy.ext.automap import automap_base -from sqlalchemy.orm import relationship, interfaces +from sqlalchemy.orm import relationship, interfaces, configure_mappers from sqlalchemy.ext.automap import generate_relationship from sqlalchemy.testing.mock import Mock from sqlalchemy import String, Integer, ForeignKey @@ -157,6 +157,72 @@ class AutomapTest(fixtures.MappedTest): ]) +class CascadeTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table( + "a", metadata, + Column('id', Integer, primary_key=True) + ) + Table( + "b", metadata, + Column('id', Integer, primary_key=True), + Column('aid', ForeignKey('a.id'), nullable=True) + ) + Table( + "c", metadata, + Column('id', Integer, primary_key=True), + Column('aid', ForeignKey('a.id'), nullable=False) + ) + Table( + "d", metadata, + Column('id', Integer, primary_key=True), + Column( + 'aid', ForeignKey('a.id', ondelete="cascade"), nullable=False) + ) + Table( + "e", metadata, + Column('id', Integer, primary_key=True), + Column( + 'aid', ForeignKey('a.id', ondelete="set null"), + nullable=True) + ) + + def test_o2m_relationship_cascade(self): + Base = automap_base(metadata=self.metadata) + Base.prepare() + + configure_mappers() + + b_rel = Base.classes.a.b_collection + assert not b_rel.property.cascade.delete + assert not b_rel.property.cascade.delete_orphan + assert not b_rel.property.passive_deletes + + assert b_rel.property.cascade.save_update + + c_rel = Base.classes.a.c_collection + assert c_rel.property.cascade.delete + assert c_rel.property.cascade.delete_orphan + assert not c_rel.property.passive_deletes + + assert c_rel.property.cascade.save_update + + d_rel = Base.classes.a.d_collection + assert d_rel.property.cascade.delete + assert d_rel.property.cascade.delete_orphan + assert d_rel.property.passive_deletes + + assert 
d_rel.property.cascade.save_update + + e_rel = Base.classes.a.e_collection + assert not e_rel.property.cascade.delete + assert not e_rel.property.cascade.delete_orphan + assert e_rel.property.passive_deletes + + assert e_rel.property.cascade.save_update + + class AutomapInhTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): -- cgit v1.2.1 From 360477cc3af826b5056039b9a19ec3ecb2b94ede Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 23 Sep 2014 13:25:22 -0400 Subject: - flake8 all of test/ext/declarative --- test/ext/declarative/test_basic.py | 408 +++++++++++++++++-------------- test/ext/declarative/test_clsregistry.py | 5 +- test/ext/declarative/test_inheritance.py | 278 +++++++++++---------- test/ext/declarative/test_mixin.py | 86 ++++--- test/ext/declarative/test_reflection.py | 193 ++++++++------- 5 files changed, 543 insertions(+), 427 deletions(-) diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py index e2c2af679..3fac39cac 100644 --- a/test/ext/declarative/test_basic.py +++ b/test/ext/declarative/test_basic.py @@ -1,6 +1,6 @@ from sqlalchemy.testing import eq_, assert_raises, \ - assert_raises_message, is_ + assert_raises_message from sqlalchemy.ext import declarative as decl from sqlalchemy import exc import sqlalchemy as sa @@ -10,21 +10,21 @@ from sqlalchemy import MetaData, Integer, String, ForeignKey, \ from sqlalchemy.testing.schema import Table, Column from sqlalchemy.orm import relationship, create_session, class_mapper, \ joinedload, configure_mappers, backref, clear_mappers, \ - deferred, column_property, composite,\ - Session, properties -from sqlalchemy.testing import eq_ -from sqlalchemy.util import classproperty, with_metaclass -from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \ - ConcreteBase, synonym_for + column_property, composite, Session, properties +from sqlalchemy.util import with_metaclass +from sqlalchemy.ext.declarative import declared_attr, 
synonym_for from sqlalchemy.testing import fixtures -from sqlalchemy.testing.util import gc_collect Base = None +User = Address = None + + class DeclarativeTestBase(fixtures.TestBase, - testing.AssertsExecutionResults, - testing.AssertsCompiledSQL): + testing.AssertsExecutionResults, + testing.AssertsCompiledSQL): __dialect__ = 'default' + def setup(self): global Base Base = decl.declarative_base(testing.db) @@ -34,13 +34,15 @@ class DeclarativeTestBase(fixtures.TestBase, clear_mappers() Base.metadata.drop_all() + class DeclarativeTest(DeclarativeTestBase): + def test_basic(self): class User(Base, fixtures.ComparableEntity): __tablename__ = 'users' id = Column('id', Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column('name', String(50)) addresses = relationship("Address", backref="user") @@ -48,7 +50,7 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'addresses' id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) email = Column(String(50), key='_email') user_id = Column('user_id', Integer, ForeignKey('users.id'), key='_user_id') @@ -82,7 +84,7 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'users' id = Column('id', Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column('name', String(50)) addresses = relationship(util.u("Address"), backref="user") @@ -90,7 +92,7 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'addresses' id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) email = Column(String(50), key='_email') user_id = Column('user_id', Integer, ForeignKey('users.id'), key='_user_id') @@ -120,8 +122,10 @@ class DeclarativeTest(DeclarativeTestBase): __table_args__ = () def test_cant_add_columns(self): - t = Table('t', Base.metadata, Column('id', Integer, - primary_key=True), Column('data', String)) + t = 
Table( + 't', Base.metadata, + Column('id', Integer, primary_key=True), + Column('data', String)) def go(): class User(Base): @@ -158,7 +162,6 @@ class DeclarativeTest(DeclarativeTestBase): go ) - def test_column_repeated_under_prop(self): def go(): class Foo(Base): @@ -180,6 +183,7 @@ class DeclarativeTest(DeclarativeTestBase): class A(Base): __tablename__ = 'a' id = Column(Integer, primary_key=True) + class B(Base): __tablename__ = 'b' id = Column(Integer, primary_key=True) @@ -196,6 +200,7 @@ class DeclarativeTest(DeclarativeTestBase): class A(Base): __tablename__ = 'a' id = Column(Integer, primary_key=True) + class B(Base): __tablename__ = 'b' id = Column(Integer, primary_key=True) @@ -213,11 +218,12 @@ class DeclarativeTest(DeclarativeTestBase): # metaclass to mock the way zope.interface breaks getattr() class BrokenMeta(type): + def __getattribute__(self, attr): if attr == 'xyzzy': raise AttributeError('xyzzy') else: - return object.__getattribute__(self,attr) + return object.__getattribute__(self, attr) # even though this class has an xyzzy attribute, getattr(cls,"xyzzy") # fails @@ -225,13 +231,13 @@ class DeclarativeTest(DeclarativeTestBase): xyzzy = "magic" # _as_declarative() inspects obj.__class__.__bases__ - class User(BrokenParent,fixtures.ComparableEntity): + class User(BrokenParent, fixtures.ComparableEntity): __tablename__ = 'users' id = Column('id', Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column('name', String(50)) - decl.instrument_declarative(User,{},Base.metadata) + decl.instrument_declarative(User, {}, Base.metadata) def test_reserved_identifiers(self): def go1(): @@ -285,29 +291,28 @@ class DeclarativeTest(DeclarativeTestBase): email = Column('email', String(50)) user_id = Column('user_id', Integer, ForeignKey('users.id')) user = relationship("User", primaryjoin=user_id == User.id, - backref="addresses") + backref="addresses") assert mapperlib.Mapper._new_mappers is True - u = User() 
+ u = User() # noqa assert User.addresses assert mapperlib.Mapper._new_mappers is False def test_string_dependency_resolution(self): - from sqlalchemy.sql import desc - class User(Base, fixtures.ComparableEntity): __tablename__ = 'users' id = Column(Integer, primary_key=True, test_needs_autoincrement=True) name = Column(String(50)) - addresses = relationship('Address', - order_by='desc(Address.email)', - primaryjoin='User.id==Address.user_id', - foreign_keys='[Address.user_id]', - backref=backref('user', - primaryjoin='User.id==Address.user_id', - foreign_keys='[Address.user_id]')) + addresses = relationship( + 'Address', + order_by='desc(Address.email)', + primaryjoin='User.id==Address.user_id', + foreign_keys='[Address.user_id]', + backref=backref('user', + primaryjoin='User.id==Address.user_id', + foreign_keys='[Address.user_id]')) class Address(Base, fixtures.ComparableEntity): @@ -319,14 +324,17 @@ class DeclarativeTest(DeclarativeTestBase): Base.metadata.create_all() sess = create_session() - u1 = User(name='ed', addresses=[Address(email='abc'), - Address(email='def'), Address(email='xyz')]) + u1 = User( + name='ed', addresses=[ + Address(email='abc'), + Address(email='def'), Address(email='xyz')]) sess.add(u1) sess.flush() sess.expunge_all() eq_(sess.query(User).filter(User.name == 'ed').one(), - User(name='ed', addresses=[Address(email='xyz'), - Address(email='def'), Address(email='abc')])) + User(name='ed', addresses=[ + Address(email='xyz'), + Address(email='def'), Address(email='abc')])) class Foo(Base, fixtures.ComparableEntity): @@ -340,7 +348,6 @@ class DeclarativeTest(DeclarativeTestBase): "ColumnProperty", configure_mappers) def test_string_dependency_resolution_synonym(self): - from sqlalchemy.sql import desc class User(Base, fixtures.ComparableEntity): @@ -416,12 +423,13 @@ class DeclarativeTest(DeclarativeTestBase): id = Column(Integer, primary_key=True) b_id = Column(ForeignKey('b.id')) - d = relationship("D", - secondary="join(B, D, B.d_id == 
D.id)." - "join(C, C.d_id == D.id)", - primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)", - secondaryjoin="D.id == B.d_id", - ) + d = relationship( + "D", + secondary="join(B, D, B.d_id == D.id)." + "join(C, C.d_id == D.id)", + primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)", + secondaryjoin="D.id == B.d_id", + ) class B(Base): __tablename__ = 'b' @@ -444,9 +452,9 @@ class DeclarativeTest(DeclarativeTestBase): self.assert_compile( s.query(A).join(A.d), "SELECT a.id AS a_id, a.b_id AS a_b_id FROM a JOIN " - "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id " - "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id " - "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id", + "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id " + "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id " + "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id", ) def test_string_dependency_resolution_no_table(self): @@ -474,6 +482,7 @@ class DeclarativeTest(DeclarativeTestBase): id = Column(Integer, primary_key=True, test_needs_autoincrement=True) name = Column(String(50)) + class Address(Base, fixtures.ComparableEntity): __tablename__ = 'addresses' @@ -481,7 +490,8 @@ class DeclarativeTest(DeclarativeTestBase): test_needs_autoincrement=True) email = Column(String(50)) user_id = Column(Integer) - user = relationship("User", + user = relationship( + "User", primaryjoin="remote(User.id)==foreign(Address.user_id)" ) @@ -497,9 +507,9 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'users' id = Column(Integer, primary_key=True) - addresses = relationship('Address', - primaryjoin='User.id==Address.user_id.prop.columns[' - '0]') + addresses = relationship( + 'Address', + primaryjoin='User.id==Address.user_id.prop.columns[0]') class Address(Base, fixtures.ComparableEntity): @@ -516,9 +526,10 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'users' id = Column(Integer, primary_key=True) - addresses = relationship('%s.Address' % __name__, - 
primaryjoin='%s.User.id==%s.Address.user_id.prop.columns[' - '0]' % (__name__, __name__)) + addresses = relationship( + '%s.Address' % __name__, + primaryjoin='%s.User.id==%s.Address.user_id.prop.columns[0]' + % (__name__, __name__)) class Address(Base, fixtures.ComparableEntity): @@ -538,8 +549,8 @@ class DeclarativeTest(DeclarativeTestBase): id = Column(Integer, primary_key=True) name = Column(String(50)) addresses = relationship('Address', - primaryjoin='User.id==Address.user_id', - backref='user') + primaryjoin='User.id==Address.user_id', + backref='user') class Address(Base, fixtures.ComparableEntity): @@ -571,10 +582,11 @@ class DeclarativeTest(DeclarativeTestBase): id = Column(Integer, primary_key=True) name = Column(String(50)) - user_to_prop = Table('user_to_prop', Base.metadata, - Column('user_id', Integer, - ForeignKey('users.id')), Column('prop_id', - Integer, ForeignKey('props.id'))) + user_to_prop = Table( + 'user_to_prop', Base.metadata, + Column('user_id', Integer, ForeignKey('users.id')), + Column('prop_id', Integer, ForeignKey('props.id'))) + configure_mappers() assert class_mapper(User).get_property('props').secondary \ is user_to_prop @@ -585,27 +597,29 @@ class DeclarativeTest(DeclarativeTestBase): class User(Base): __tablename__ = 'users' - __table_args__ = {'schema':'fooschema'} + __table_args__ = {'schema': 'fooschema'} id = Column(Integer, primary_key=True) name = Column(String(50)) - props = relationship('Prop', secondary='fooschema.user_to_prop', - primaryjoin='User.id==fooschema.user_to_prop.c.user_id', - secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id', - backref='users') + props = relationship( + 'Prop', secondary='fooschema.user_to_prop', + primaryjoin='User.id==fooschema.user_to_prop.c.user_id', + secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id', + backref='users') class Prop(Base): __tablename__ = 'props' - __table_args__ = {'schema':'fooschema'} + __table_args__ = {'schema': 'fooschema'} id = Column(Integer, 
primary_key=True) name = Column(String(50)) - user_to_prop = Table('user_to_prop', Base.metadata, - Column('user_id', Integer, ForeignKey('fooschema.users.id')), - Column('prop_id',Integer, ForeignKey('fooschema.props.id')), - schema='fooschema') + user_to_prop = Table( + 'user_to_prop', Base.metadata, + Column('user_id', Integer, ForeignKey('fooschema.users.id')), + Column('prop_id', Integer, ForeignKey('fooschema.props.id')), + schema='fooschema') configure_mappers() assert class_mapper(User).get_property('props').secondary \ @@ -618,9 +632,11 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'parent' id = Column(Integer, primary_key=True) name = Column(String) - children = relationship("Child", - primaryjoin="Parent.name==remote(foreign(func.lower(Child.name_upper)))" - ) + children = relationship( + "Child", + primaryjoin="Parent.name==" + "remote(foreign(func.lower(Child.name_upper)))" + ) class Child(Base): __tablename__ = 'child' @@ -667,8 +683,8 @@ class DeclarativeTest(DeclarativeTestBase): test_needs_autoincrement=True) name = Column(String(50)) addresses = relationship('Address', order_by=Address.email, - foreign_keys=Address.user_id, - remote_side=Address.user_id) + foreign_keys=Address.user_id, + remote_side=Address.user_id) # get the mapper for User. 
User mapper will compile, # "addresses" relationship will call upon Address.user_id for @@ -681,14 +697,16 @@ class DeclarativeTest(DeclarativeTestBase): class_mapper(User) Base.metadata.create_all() sess = create_session() - u1 = User(name='ed', addresses=[Address(email='abc'), - Address(email='xyz'), Address(email='def')]) + u1 = User(name='ed', addresses=[ + Address(email='abc'), + Address(email='xyz'), Address(email='def')]) sess.add(u1) sess.flush() sess.expunge_all() eq_(sess.query(User).filter(User.name == 'ed').one(), - User(name='ed', addresses=[Address(email='abc'), - Address(email='def'), Address(email='xyz')])) + User(name='ed', addresses=[ + Address(email='abc'), + Address(email='def'), Address(email='xyz')])) def test_nice_dependency_error(self): @@ -726,14 +744,16 @@ class DeclarativeTest(DeclarativeTestBase): # the exception is preserved. Remains the # same through repeated calls. for i in range(3): - assert_raises_message(sa.exc.InvalidRequestError, - "^One or more mappers failed to initialize - " - "can't proceed with initialization of other " - "mappers. Original exception was: When initializing.*", - configure_mappers) + assert_raises_message( + sa.exc.InvalidRequestError, + "^One or more mappers failed to initialize - " + "can't proceed with initialization of other " + "mappers. 
Original exception was: When initializing.*", + configure_mappers) def test_custom_base(self): class MyBase(object): + def foobar(self): return "foobar" Base = decl.declarative_base(cls=MyBase) @@ -761,7 +781,7 @@ class DeclarativeTest(DeclarativeTestBase): Base.metadata.create_all() configure_mappers() assert class_mapper(Detail).get_property('master' - ).strategy.use_get + ).strategy.use_get m1 = Master() d1 = Detail(master=m1) sess = create_session() @@ -821,13 +841,15 @@ class DeclarativeTest(DeclarativeTestBase): eq_(Address.__table__.c['_email'].name, 'email') eq_(Address.__table__.c['_user_id'].name, 'user_id') u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User( + name='u1', + addresses=[Address(email='one'), Address(email='two')])]) a1 = sess.query(Address).filter(Address.email == 'two').one() eq_(a1, Address(email='two')) eq_(a1.user, User(name='u1')) @@ -842,7 +864,8 @@ class DeclarativeTest(DeclarativeTestBase): class ASub(A): brap = A.data assert ASub.brap.property is A.data.property - assert isinstance(ASub.brap.original_property, properties.SynonymProperty) + assert isinstance( + ASub.brap.original_property, properties.SynonymProperty) def test_alt_name_attr_subclass_relationship_inline(self): # [ticket:2900] @@ -857,10 +880,12 @@ class DeclarativeTest(DeclarativeTestBase): id = Column('id', Integer, primary_key=True) configure_mappers() + class ASub(A): brap = A.b assert ASub.brap.property is A.b.property - assert isinstance(ASub.brap.original_property, properties.SynonymProperty) + assert isinstance( + ASub.brap.original_property, properties.SynonymProperty) ASub(brap=B()) def test_alt_name_attr_subclass_column_attrset(self): @@ -881,6 +906,7 @@ class 
DeclarativeTest(DeclarativeTestBase): b_id = Column(Integer, ForeignKey('b.id')) b = relationship("B", backref="as_") A.brap = A.b + class B(Base): __tablename__ = 'b' id = Column('id', Integer, primary_key=True) @@ -889,7 +915,6 @@ class DeclarativeTest(DeclarativeTestBase): assert isinstance(A.brap.original_property, properties.SynonymProperty) A(brap=B()) - def test_eager_order_by(self): class Address(Base, fixtures.ComparableEntity): @@ -910,14 +935,14 @@ class DeclarativeTest(DeclarativeTestBase): Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='two'), - Address(email='one')]) + Address(email='one')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() eq_(sess.query(User).options(joinedload(User.addresses)).all(), [User(name='u1', addresses=[Address(email='one'), - Address(email='two')])]) + Address(email='two')])]) def test_order_by_multi(self): @@ -936,17 +961,17 @@ class DeclarativeTest(DeclarativeTestBase): test_needs_autoincrement=True) name = Column('name', String(50)) addresses = relationship('Address', - order_by=(Address.email, Address.id)) + order_by=(Address.email, Address.id)) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='two'), - Address(email='one')]) + Address(email='one')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() u = sess.query(User).filter(User.name == 'u1').one() - a = u.addresses + u.addresses def test_as_declarative(self): @@ -971,13 +996,15 @@ class DeclarativeTest(DeclarativeTestBase): decl.instrument_declarative(Address, reg, Base.metadata) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User( + name='u1', + addresses=[Address(email='one'), 
Address(email='two')])]) def test_custom_mapper_attribute(self): @@ -1045,7 +1072,7 @@ class DeclarativeTest(DeclarativeTestBase): __tablename__ = 'foo' __table_args__ = ForeignKeyConstraint(['id'], ['foo.id' - ]) + ]) id = Column('id', Integer, primary_key=True) assert_raises_message(sa.exc.ArgumentError, '__table_args__ value must be a tuple, ', err) @@ -1107,17 +1134,18 @@ class DeclarativeTest(DeclarativeTestBase): User.address_count = \ sa.orm.column_property(sa.select([sa.func.count(Address.id)]). - where(Address.user_id - == User.id).as_scalar()) + where(Address.user_id + == User.id).as_scalar()) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', address_count=2, - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', address_count=2, + addresses=[Address(email='one'), Address(email='two')])]) def test_useless_declared_attr(self): class Address(Base, fixtures.ComparableEntity): @@ -1140,23 +1168,26 @@ class DeclarativeTest(DeclarativeTestBase): def address_count(cls): # this doesn't really gain us anything. but if # one is used, lets have it function as expected... - return sa.orm.column_property(sa.select([sa.func.count(Address.id)]). - where(Address.user_id == cls.id)) + return sa.orm.column_property( + sa.select([sa.func.count(Address.id)]). 
+ where(Address.user_id == cls.id)) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', address_count=2, - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', address_count=2, + addresses=[Address(email='one'), Address(email='two')])]) def test_declared_on_base_class(self): class MyBase(Base): __tablename__ = 'foo' id = Column(Integer, primary_key=True) + @declared_attr def somecol(cls): return Column(Integer) @@ -1213,18 +1244,19 @@ class DeclarativeTest(DeclarativeTestBase): adr_count = \ sa.orm.column_property( sa.select([sa.func.count(Address.id)], - Address.user_id == id).as_scalar()) + Address.user_id == id).as_scalar()) addresses = relationship(Address) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', adr_count=2, - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', adr_count=2, + addresses=[Address(email='one'), Address(email='two')])]) def test_column_properties_2(self): @@ -1248,7 +1280,7 @@ class DeclarativeTest(DeclarativeTestBase): eq_(set(User.__table__.c.keys()), set(['id', 'name'])) eq_(set(Address.__table__.c.keys()), set(['id', 'email', - 'user_id'])) + 'user_id'])) def test_deferred(self): @@ -1274,86 +1306,91 @@ class DeclarativeTest(DeclarativeTestBase): def test_composite_inline(self): class AddressComposite(fixtures.ComparableEntity): + def __init__(self, street, state): self.street = street self.state = state + def __composite_values__(self): return [self.street, self.state] class User(Base, fixtures.ComparableEntity): __tablename__ = 'user' id 
= Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) address = composite(AddressComposite, - Column('street', String(50)), - Column('state', String(2)), - ) + Column('street', String(50)), + Column('state', String(2)), + ) Base.metadata.create_all() sess = Session() sess.add(User( - address=AddressComposite('123 anywhere street', - 'MD') - )) + address=AddressComposite('123 anywhere street', + 'MD') + )) sess.commit() eq_( sess.query(User).all(), [User(address=AddressComposite('123 anywhere street', - 'MD'))] + 'MD'))] ) def test_composite_separate(self): class AddressComposite(fixtures.ComparableEntity): + def __init__(self, street, state): self.street = street self.state = state + def __composite_values__(self): return [self.street, self.state] class User(Base, fixtures.ComparableEntity): __tablename__ = 'user' id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) street = Column(String(50)) state = Column(String(2)) address = composite(AddressComposite, - street, state) + street, state) Base.metadata.create_all() sess = Session() sess.add(User( - address=AddressComposite('123 anywhere street', - 'MD') - )) + address=AddressComposite('123 anywhere street', + 'MD') + )) sess.commit() eq_( sess.query(User).all(), [User(address=AddressComposite('123 anywhere street', - 'MD'))] + 'MD'))] ) def test_mapping_to_join(self): users = Table('users', Base.metadata, - Column('id', Integer, primary_key=True) - ) + Column('id', Integer, primary_key=True) + ) addresses = Table('addresses', Base.metadata, - Column('id', Integer, primary_key=True), - Column('user_id', Integer, ForeignKey('users.id')) - ) + Column('id', Integer, primary_key=True), + Column('user_id', Integer, ForeignKey('users.id')) + ) usersaddresses = sa.join(users, addresses, users.c.id == addresses.c.user_id) + class User(Base): __table__ = usersaddresses - __table_args__ = {'primary_key':[users.c.id]} + 
__table_args__ = {'primary_key': [users.c.id]} # need to use column_property for now user_id = column_property(users.c.id, addresses.c.user_id) address_id = addresses.c.id assert User.__mapper__.get_property('user_id').columns[0] \ - is users.c.id + is users.c.id assert User.__mapper__.get_property('user_id').columns[1] \ - is addresses.c.user_id + is addresses.c.user_id def test_synonym_inline(self): @@ -1372,7 +1409,7 @@ class DeclarativeTest(DeclarativeTestBase): name = sa.orm.synonym('_name', descriptor=property(_get_name, - _set_name)) + _set_name)) Base.metadata.create_all() sess = create_session() @@ -1381,7 +1418,7 @@ class DeclarativeTest(DeclarativeTestBase): sess.add(u1) sess.flush() eq_(sess.query(User).filter(User.name == 'SOMENAME someuser' - ).one(), u1) + ).one(), u1) def test_synonym_no_descriptor(self): from sqlalchemy.orm.properties import ColumnProperty @@ -1434,7 +1471,7 @@ class DeclarativeTest(DeclarativeTestBase): sess.add(u1) sess.flush() eq_(sess.query(User).filter(User.name == 'SOMENAME someuser' - ).one(), u1) + ).one(), u1) def test_reentrant_compile_via_foreignkey(self): @@ -1465,13 +1502,14 @@ class DeclarativeTest(DeclarativeTestBase): ) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', + addresses=[Address(email='one'), Address(email='two')])]) def test_relationship_reference(self): @@ -1490,21 +1528,22 @@ class DeclarativeTest(DeclarativeTestBase): test_needs_autoincrement=True) name = Column('name', String(50)) addresses = relationship('Address', backref='user', - primaryjoin=id == Address.user_id) + primaryjoin=id == Address.user_id) User.address_count = \ sa.orm.column_property(sa.select([sa.func.count(Address.id)]). 
- where(Address.user_id - == User.id).as_scalar()) + where(Address.user_id + == User.id).as_scalar()) Base.metadata.create_all() u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', address_count=2, - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', address_count=2, + addresses=[Address(email='one'), Address(email='two')])]) def test_pk_with_fk_init(self): @@ -1526,9 +1565,11 @@ class DeclarativeTest(DeclarativeTestBase): def test_with_explicit_autoloaded(self): meta = MetaData(testing.db) - t1 = Table('t1', meta, Column('id', String(50), + t1 = Table( + 't1', meta, + Column('id', String(50), primary_key=True, test_needs_autoincrement=True), - Column('data', String(50))) + Column('data', String(50))) meta.create_all() try: @@ -1541,7 +1582,7 @@ class DeclarativeTest(DeclarativeTestBase): sess.add(m) sess.flush() eq_(t1.select().execute().fetchall(), [('someid', 'somedata' - )]) + )]) finally: meta.drop_all() @@ -1584,7 +1625,7 @@ class DeclarativeTest(DeclarativeTestBase): op, other, **kw - ): + ): return op(self.upperself, other, **kw) class User(Base, fixtures.ComparableEntity): @@ -1612,7 +1653,7 @@ class DeclarativeTest(DeclarativeTestBase): eq_(rt, u1) sess.expunge_all() rt = sess.query(User).filter(User.uc_name.startswith('SOMEUSE' - )).one() + )).one() eq_(rt, u1) def test_duplicate_classes_in_base(self): @@ -1631,7 +1672,6 @@ class DeclarativeTest(DeclarativeTestBase): ) - def _produce_test(inline, stringbased): class ExplicitJoinTest(fixtures.MappedTest): @@ -1657,35 +1697,43 @@ def _produce_test(inline, stringbased): user_id = Column(Integer, ForeignKey('users.id')) if inline: if stringbased: - user = relationship('User', - primaryjoin='User.id==Address.user_id', - backref='addresses') + user = relationship( + 'User', + 
primaryjoin='User.id==Address.user_id', + backref='addresses') else: user = relationship(User, primaryjoin=User.id - == user_id, backref='addresses') + == user_id, backref='addresses') if not inline: configure_mappers() if stringbased: - Address.user = relationship('User', - primaryjoin='User.id==Address.user_id', - backref='addresses') + Address.user = relationship( + 'User', + primaryjoin='User.id==Address.user_id', + backref='addresses') else: - Address.user = relationship(User, - primaryjoin=User.id == Address.user_id, - backref='addresses') + Address.user = relationship( + User, + primaryjoin=User.id == Address.user_id, + backref='addresses') @classmethod def insert_data(cls): - params = [dict(list(zip(('id', 'name'), column_values))) - for column_values in [(7, 'jack'), (8, 'ed'), (9, - 'fred'), (10, 'chuck')]] + params = [ + dict(list(zip(('id', 'name'), column_values))) + for column_values in [ + (7, 'jack'), (8, 'ed'), + (9, 'fred'), (10, 'chuck')]] + User.__table__.insert().execute(params) - Address.__table__.insert().execute([dict(list(zip(('id', - 'user_id', 'email'), column_values))) - for column_values in [(1, 7, 'jack@bean.com'), (2, - 8, 'ed@wood.com'), (3, 8, 'ed@bettyboop.com'), (4, - 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]]) + Address.__table__.insert().execute([ + dict(list(zip(('id', 'user_id', 'email'), column_values))) + for column_values in [ + (1, 7, 'jack@bean.com'), + (2, 8, 'ed@wood.com'), + (3, 8, 'ed@bettyboop.com'), + (4, 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]]) def test_aliased_join(self): @@ -1699,13 +1747,14 @@ def _produce_test(inline, stringbased): sess = create_session() eq_(sess.query(User).join(User.addresses, - aliased=True).filter(Address.email == 'ed@wood.com' - ).filter(User.addresses.any(Address.email - == 'jack@bean.com')).all(), []) - - ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % (inline - and 'Inline' or 'Separate', stringbased and 'String' - or 'Literal') + aliased=True).filter( + Address.email == 
'ed@wood.com').filter( + User.addresses.any(Address.email == 'jack@bean.com')).all(), + []) + + ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % ( + inline and 'Inline' or 'Separate', + stringbased and 'String' or 'Literal') return ExplicitJoinTest for inline in True, False: @@ -1713,4 +1762,3 @@ for inline in True, False: testclass = _produce_test(inline, stringbased) exec('%s = testclass' % testclass.__name__) del testclass - diff --git a/test/ext/declarative/test_clsregistry.py b/test/ext/declarative/test_clsregistry.py index e78a1abbe..535fd00b3 100644 --- a/test/ext/declarative/test_clsregistry.py +++ b/test/ext/declarative/test_clsregistry.py @@ -5,7 +5,9 @@ from sqlalchemy import exc, MetaData from sqlalchemy.ext.declarative import clsregistry import weakref + class MockClass(object): + def __init__(self, base, name): self._decl_class_registry = base tokens = name.split(".") @@ -183,7 +185,7 @@ class ClsRegistryTest(fixtures.TestBase): f1 = MockClass(base, "foo.bar.Foo") clsregistry.add_class("Foo", f1) reg = base['_sa_module_registry'] - mod_entry = reg['foo']['bar'] + mod_entry = reg['foo']['bar'] # noqa resolver = clsregistry._resolver(f1, MockProp()) resolver = resolver("foo") assert_raises_message( @@ -232,4 +234,3 @@ class ClsRegistryTest(fixtures.TestBase): del f4 gc_collect() assert 'single' not in reg - diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py index edff4421e..e450a1c43 100644 --- a/test/ext/declarative/test_inheritance.py +++ b/test/ext/declarative/test_inheritance.py @@ -10,12 +10,14 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \ configure_mappers, clear_mappers, \ polymorphic_union, deferred, Session from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \ - ConcreteBase, has_inherited_table + ConcreteBase, has_inherited_table from sqlalchemy.testing import fixtures Base = None + class DeclarativeTestBase(fixtures.TestBase, 
testing.AssertsExecutionResults): + def setup(self): global Base Base = decl.declarative_base(testing.db) @@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults): clear_mappers() Base.metadata.drop_all() + class DeclarativeInheritanceTest(DeclarativeTestBase): def test_we_must_copy_mapper_args(self): @@ -65,7 +68,6 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): assert class_mapper(Person).version_id_col == 'a' assert class_mapper(Person).include_properties == set(['id', 'a', 'b']) - def test_custom_join_condition(self): class Foo(Base): @@ -123,21 +125,23 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): Base.metadata.create_all() sess = create_session() - c1 = Company(name='MegaCorp, Inc.', - employees=[Engineer(name='dilbert', - primary_language='java'), Engineer(name='wally', - primary_language='c++'), Manager(name='dogbert', - golf_swing='fore!')]) + c1 = Company( + name='MegaCorp, Inc.', + employees=[ + Engineer(name='dilbert', primary_language='java'), + Engineer(name='wally', primary_language='c++'), + Manager(name='dogbert', golf_swing='fore!')]) + c2 = Company(name='Elbonia, Inc.', employees=[Engineer(name='vlad', - primary_language='cobol')]) + primary_language='cobol')]) sess.add(c1) sess.add(c2) sess.flush() sess.expunge_all() eq_(sess.query(Company).filter(Company.employees.of_type(Engineer). - any(Engineer.primary_language - == 'cobol')).first(), c2) + any(Engineer.primary_language + == 'cobol')).first(), c2) # ensure that the Manager mapper was compiled with the Manager id # column as higher priority. this ensures that "Manager.id" @@ -145,8 +149,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): # table (reversed from 0.6's behavior.) 
eq_( - Manager.id.property.columns, - [Manager.__table__.c.id, Person.__table__.c.id] + Manager.id.property.columns, + [Manager.__table__.c.id, Person.__table__.c.id] ) # assert that the "id" column is available without a second @@ -157,13 +161,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): def go(): assert sess.query(Manager).filter(Manager.name == 'dogbert' - ).one().id + ).one().id self.assert_sql_count(testing.db, go, 1) sess.expunge_all() def go(): assert sess.query(Person).filter(Manager.name == 'dogbert' - ).one().id + ).one().id self.assert_sql_count(testing.db, go, 1) @@ -186,7 +190,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): primary_key=True) Engineer.primary_language = Column('primary_language', - String(50)) + String(50)) Base.metadata.create_all() sess = create_session() e1 = Engineer(primary_language='java', name='dilbert') @@ -194,7 +198,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): sess.flush() sess.expunge_all() eq_(sess.query(Person).first(), - Engineer(primary_language='java', name='dilbert')) + Engineer(primary_language='java', name='dilbert')) def test_add_parentcol_after_the_fact(self): @@ -258,8 +262,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): sess.add(e1) sess.flush() sess.expunge_all() - eq_(sess.query(Person).first(), Admin(primary_language='java', - name='dilbert', workstation='foo')) + eq_(sess.query(Person).first(), + Admin(primary_language='java', name='dilbert', workstation='foo')) def test_subclass_mixin(self): @@ -331,26 +335,25 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): class PlanBooking(Booking): __tablename__ = 'plan_booking' id = Column(Integer, ForeignKey(Booking.id), - primary_key=True) + primary_key=True) # referencing PlanBooking.id gives us the column # on plan_booking, not booking class FeatureBooking(Booking): __tablename__ = 'feature_booking' id = Column(Integer, ForeignKey(Booking.id), - primary_key=True) + primary_key=True) plan_booking_id = 
Column(Integer, - ForeignKey(PlanBooking.id)) + ForeignKey(PlanBooking.id)) plan_booking = relationship(PlanBooking, - backref='feature_bookings') + backref='feature_bookings') assert FeatureBooking.__table__.c.plan_booking_id.\ - references(PlanBooking.__table__.c.id) + references(PlanBooking.__table__.c.id) assert FeatureBooking.__table__.c.id.\ - references(Booking.__table__.c.id) - + references(Booking.__table__.c.id) def test_single_colsonbase(self): """test single inheritance where all the columns are on the base @@ -387,23 +390,26 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): Base.metadata.create_all() sess = create_session() - c1 = Company(name='MegaCorp, Inc.', - employees=[Engineer(name='dilbert', - primary_language='java'), Engineer(name='wally', - primary_language='c++'), Manager(name='dogbert', - golf_swing='fore!')]) + c1 = Company( + name='MegaCorp, Inc.', + employees=[ + Engineer(name='dilbert', primary_language='java'), + Engineer(name='wally', primary_language='c++'), + Manager(name='dogbert', golf_swing='fore!')]) + c2 = Company(name='Elbonia, Inc.', employees=[Engineer(name='vlad', - primary_language='cobol')]) + primary_language='cobol')]) sess.add(c1) sess.add(c2) sess.flush() sess.expunge_all() eq_(sess.query(Person).filter(Engineer.primary_language - == 'cobol').first(), Engineer(name='vlad')) + == 'cobol').first(), + Engineer(name='vlad')) eq_(sess.query(Company).filter(Company.employees.of_type(Engineer). 
- any(Engineer.primary_language - == 'cobol')).first(), c2) + any(Engineer.primary_language + == 'cobol')).first(), c2) def test_single_colsonsub(self): """test single inheritance where the columns are local to their @@ -470,15 +476,17 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): sess.flush() sess.expunge_all() eq_(sess.query(Person).filter(Engineer.primary_language - == 'cobol').first(), Engineer(name='vlad')) + == 'cobol').first(), + Engineer(name='vlad')) eq_(sess.query(Company).filter(Company.employees.of_type(Engineer). - any(Engineer.primary_language - == 'cobol')).first(), c2) + any(Engineer.primary_language + == 'cobol')).first(), c2) eq_(sess.query(Engineer).filter_by(primary_language='cobol' - ).one(), Engineer(name='vlad', primary_language='cobol')) + ).one(), + Engineer(name='vlad', primary_language='cobol')) @testing.skip_if(lambda: testing.against('oracle'), - "Test has an empty insert in it at the moment") + "Test has an empty insert in it at the moment") def test_columns_single_inheritance_conflict_resolution(self): """Test that a declared_attr can return the existing column and it will be ignored. this allows conditional columns to be added. 
@@ -491,25 +499,29 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): id = Column(Integer, primary_key=True) class Engineer(Person): + """single table inheritance""" @declared_attr def target_id(cls): - return cls.__table__.c.get('target_id', - Column(Integer, ForeignKey('other.id')) - ) + return cls.__table__.c.get( + 'target_id', + Column(Integer, ForeignKey('other.id'))) + @declared_attr def target(cls): return relationship("Other") class Manager(Person): + """single table inheritance""" @declared_attr def target_id(cls): - return cls.__table__.c.get('target_id', - Column(Integer, ForeignKey('other.id')) - ) + return cls.__table__.c.get( + 'target_id', + Column(Integer, ForeignKey('other.id'))) + @declared_attr def target(cls): return relationship("Other") @@ -534,11 +546,10 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): Engineer(target=o1), Manager(target=o2), Manager(target=o1) - ]) + ]) session.commit() eq_(session.query(Engineer).first().target, o1) - def test_joined_from_single(self): class Company(Base, fixtures.ComparableEntity): @@ -595,12 +606,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): sess.expunge_all() eq_(sess.query(Person).with_polymorphic(Engineer). filter(Engineer.primary_language - == 'cobol').first(), Engineer(name='vlad')) + == 'cobol').first(), Engineer(name='vlad')) eq_(sess.query(Company).filter(Company.employees.of_type(Engineer). 
- any(Engineer.primary_language - == 'cobol')).first(), c2) + any(Engineer.primary_language + == 'cobol')).first(), c2) eq_(sess.query(Engineer).filter_by(primary_language='cobol' - ).one(), Engineer(name='vlad', primary_language='cobol')) + ).one(), + Engineer(name='vlad', primary_language='cobol')) def test_single_from_joined_colsonsub(self): class Person(Base, fixtures.ComparableEntity): @@ -661,7 +673,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): eq_(sess.query(Person).all(), [Person(name='ratbert')]) sess.expunge_all() person = sess.query(Person).filter(Person.name == 'ratbert' - ).one() + ).one() assert 'name' not in person.__dict__ def test_single_fksonsub(self): @@ -683,7 +695,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): __mapper_args__ = {'polymorphic_identity': 'engineer'} primary_language_id = Column(Integer, - ForeignKey('languages.id')) + ForeignKey('languages.id')) primary_language = relationship('Language') class Language(Base, fixtures.ComparableEntity): @@ -706,19 +718,19 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): sess.expunge_all() eq_(sess.query(Person).filter(Engineer.primary_language.has( Language.name - == 'cobol')).first(), Engineer(name='vlad', - primary_language=Language(name='cobol'))) + == 'cobol')).first(), + Engineer(name='vlad', primary_language=Language(name='cobol'))) eq_(sess.query(Engineer).filter(Engineer.primary_language.has( Language.name - == 'cobol')).one(), Engineer(name='vlad', - primary_language=Language(name='cobol'))) + == 'cobol')).one(), + Engineer(name='vlad', primary_language=Language(name='cobol'))) eq_(sess.query(Person).join(Engineer.primary_language).order_by( Language.name).all(), [Engineer(name='vlad', - primary_language=Language(name='cobol')), - Engineer(name='wally', primary_language=Language(name='cpp' - )), Engineer(name='dilbert', - primary_language=Language(name='java'))]) + primary_language=Language(name='cobol')), + Engineer(name='wally', 
primary_language=Language(name='cpp' + )), + Engineer(name='dilbert', primary_language=Language(name='java'))]) def test_single_three_levels(self): @@ -810,11 +822,11 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): __mapper_args__ = {'polymorphic_identity': 'engineer'} primary_language = Column('primary_language', - String(50)) + String(50)) foo_bar = Column(Integer, primary_key=True) assert_raises_message(sa.exc.ArgumentError, - 'place primary key', go) + 'place primary key', go) def test_single_no_table_args(self): @@ -832,7 +844,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): __mapper_args__ = {'polymorphic_identity': 'engineer'} primary_language = Column('primary_language', - String(50)) + String(50)) # this should be on the Person class, as this is single # table inheritance, which is why we test that this @@ -849,6 +861,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): __tablename__ = "a" id = Column(Integer, primary_key=True) a_1 = A + class A(a_1): __tablename__ = 'b' id = Column(Integer(), ForeignKey(a_1.id), primary_key=True) @@ -857,6 +870,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): class OverlapColPrecedenceTest(DeclarativeTestBase): + """test #1892 cases when declarative does column precedence.""" def _run_test(self, Engineer, e_id, p_id): @@ -895,7 +909,7 @@ class OverlapColPrecedenceTest(DeclarativeTestBase): class Engineer(Person): __tablename__ = 'engineer' id = Column("eid", Integer, ForeignKey('person.id'), - primary_key=True) + primary_key=True) self._run_test(Engineer, "eid", "id") @@ -907,15 +921,18 @@ class OverlapColPrecedenceTest(DeclarativeTestBase): class Engineer(Person): __tablename__ = 'engineer' id = Column("eid", Integer, ForeignKey('person.pid'), - primary_key=True) + primary_key=True) self._run_test(Engineer, "eid", "pid") from test.orm.test_events import _RemoveListeners + + class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): + def _roundtrip(self, Employee, Manager, 
Engineer, Boss, - polymorphic=True, explicit_type=False): + polymorphic=True, explicit_type=False): Base.metadata.create_all() sess = create_session() e1 = Engineer(name='dilbert', primary_language='java') @@ -932,7 +949,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): assert_raises_message( AttributeError, "does not implement attribute .?'type' " - "at the instance level.", + "at the instance level.", getattr, obj, "type" ) else: @@ -946,37 +963,38 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): if polymorphic: eq_(sess.query(Employee).order_by(Employee.name).all(), [Engineer(name='dilbert'), Manager(name='dogbert'), - Boss(name='pointy haired'), Engineer(name='vlad'), Engineer(name='wally')]) + Boss(name='pointy haired'), + Engineer(name='vlad'), Engineer(name='wally')]) else: eq_(sess.query(Engineer).order_by(Engineer.name).all(), [Engineer(name='dilbert'), Engineer(name='vlad'), - Engineer(name='wally')]) + Engineer(name='wally')]) eq_(sess.query(Manager).all(), [Manager(name='dogbert')]) eq_(sess.query(Boss).all(), [Boss(name='pointy haired')]) - def test_explicit(self): - engineers = Table('engineers', Base.metadata, Column('id', - Integer, primary_key=True, - test_needs_autoincrement=True), - Column('name', String(50)), - Column('primary_language', String(50))) + engineers = Table( + 'engineers', Base.metadata, + Column('id', + Integer, primary_key=True, test_needs_autoincrement=True), + Column('name', String(50)), + Column('primary_language', String(50))) managers = Table('managers', Base.metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('name', String(50)), - Column('golf_swing', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(50)), + Column('golf_swing', String(50)) + ) boss = Table('boss', Base.metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('name', String(50)), 
- Column('golf_swing', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(50)), + Column('golf_swing', String(50)) + ) punion = polymorphic_union({ - 'engineer': engineers, - 'manager': managers, - 'boss': boss}, 'type', 'punion') + 'engineer': engineers, + 'manager': managers, + 'boss': boss}, 'type', 'punion') class Employee(Base, fixtures.ComparableEntity): @@ -1047,31 +1065,31 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): class Manager(Employee): __tablename__ = 'manager' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity': 'manager', - 'concrete': True} + 'polymorphic_identity': 'manager', + 'concrete': True} class Boss(Manager): __tablename__ = 'boss' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity': 'boss', - 'concrete': True} + 'polymorphic_identity': 'boss', + 'concrete': True} class Engineer(Employee): __tablename__ = 'engineer' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) primary_language = Column(String(40)) __mapper_args__ = {'polymorphic_identity': 'engineer', - 'concrete': True} + 'concrete': True} self._roundtrip(Employee, Manager, Engineer, Boss) @@ -1079,42 +1097,42 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): class Employee(ConcreteBase, Base, fixtures.ComparableEntity): __tablename__ = 'employee' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) __mapper_args__ = { - 'polymorphic_identity': 'employee', - 'concrete': True} + 
'polymorphic_identity': 'employee', + 'concrete': True} + class Manager(Employee): __tablename__ = 'manager' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity': 'manager', - 'concrete': True} + 'polymorphic_identity': 'manager', + 'concrete': True} class Boss(Manager): __tablename__ = 'boss' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) __mapper_args__ = { - 'polymorphic_identity': 'boss', - 'concrete': True} + 'polymorphic_identity': 'boss', + 'concrete': True} class Engineer(Employee): __tablename__ = 'engineer' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) primary_language = Column(String(40)) __mapper_args__ = {'polymorphic_identity': 'engineer', - 'concrete': True} + 'concrete': True} self._roundtrip(Employee, Manager, Engineer, Boss) - def test_has_inherited_table_doesnt_consider_base(self): class A(Base): __tablename__ = 'a' @@ -1140,7 +1158,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): ret = { 'polymorphic_identity': 'default', 'polymorphic_on': cls.type, - } + } else: ret = {'polymorphic_identity': cls.__name__} return ret @@ -1161,7 +1179,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): class Manager(Employee): __tablename__ = 'manager' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) @@ -1170,13 +1188,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): return "manager" __mapper_args__ = { - 'polymorphic_identity': "manager", - 'concrete': True} + 'polymorphic_identity': "manager", + 'concrete': True} 
class Boss(Manager): __tablename__ = 'boss' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) golf_swing = Column(String(40)) @@ -1185,13 +1203,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): return "boss" __mapper_args__ = { - 'polymorphic_identity': "boss", - 'concrete': True} + 'polymorphic_identity': "boss", + 'concrete': True} class Engineer(Employee): __tablename__ = 'engineer' employee_id = Column(Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) name = Column(String(50)) primary_language = Column(String(40)) @@ -1199,26 +1217,30 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase): def type(self): return "engineer" __mapper_args__ = {'polymorphic_identity': "engineer", - 'concrete': True} + 'concrete': True} self._roundtrip(Employee, Manager, Engineer, Boss, explicit_type=True) -class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase): + +class ConcreteExtensionConfigTest( + _RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase): __dialect__ = 'default' def test_classreg_setup(self): class A(Base, fixtures.ComparableEntity): __tablename__ = 'a' - id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + id = Column(Integer, + primary_key=True, test_needs_autoincrement=True) data = Column(String(50)) collection = relationship("BC", primaryjoin="BC.a_id == A.id", - collection_class=set) + collection_class=set) class BC(AbstractConcreteBase, Base, fixtures.ComparableEntity): pass class B(BC): __tablename__ = 'b' - id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + id = Column(Integer, + primary_key=True, test_needs_autoincrement=True) a_id = Column(Integer, ForeignKey('a.id')) data = Column(String(50)) @@ -1230,7 +1252,8 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL, class 
C(BC): __tablename__ = 'c' - id = Column(Integer, primary_key=True, test_needs_autoincrement=True) + id = Column(Integer, + primary_key=True, test_needs_autoincrement=True) a_id = Column(Integer, ForeignKey('a.id')) data = Column(String(50)) c_data = Column(String(50)) @@ -1274,8 +1297,9 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL, sess.query(A).join(A.collection), "SELECT a.id AS a_id, a.data AS a_data FROM a JOIN " "(SELECT c.id AS id, c.a_id AS a_id, c.data AS data, " - "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, " - "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, " - "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, " - "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin ON pjoin.a_id = a.id" + "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, " + "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, " + "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, " + "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin " + "ON pjoin.a_id = a.id" ) diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py index d3c2ff982..0d7cb7169 100644 --- a/test/ext/declarative/test_mixin.py +++ b/test/ext/declarative/test_mixin.py @@ -15,7 +15,9 @@ from sqlalchemy.testing import fixtures Base = None + class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults): + def setup(self): global Base Base = decl.declarative_base(testing.db) @@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults): clear_mappers() Base.metadata.drop_all() + class DeclarativeMixinTest(DeclarativeTestBase): def test_simple(self): @@ -157,6 +160,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_table_name_inherited(self): class MyMixin: + @declared_attr def __tablename__(cls): return cls.__name__.lower() @@ -169,6 +173,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_classproperty_still_works(self): class 
MyMixin(object): + @classproperty def __tablename__(cls): return cls.__name__.lower() @@ -182,6 +187,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_table_name_not_inherited(self): class MyMixin: + @declared_attr def __tablename__(cls): return cls.__name__.lower() @@ -195,11 +201,13 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_table_name_inheritance_order(self): class MyMixin1: + @declared_attr def __tablename__(cls): return cls.__name__.lower() + '1' class MyMixin2: + @declared_attr def __tablename__(cls): return cls.__name__.lower() + '2' @@ -212,6 +220,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_table_name_dependent_on_subclass(self): class MyHistoryMixin: + @declared_attr def __tablename__(cls): return cls.parent_name + '_changelog' @@ -236,6 +245,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_table_args_inherited_descriptor(self): class MyMixin: + @declared_attr def __table_args__(cls): return {'info': cls.__name__} @@ -289,7 +299,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): assert Specific.bar.prop is General.bar.prop @testing.skip_if(lambda: testing.against('oracle'), - "Test has an empty insert in it at the moment") + "Test has an empty insert in it at the moment") def test_columns_single_inheritance_conflict_resolution(self): """Test that a declared_attr can return the existing column and it will be ignored. this allows conditional columns to be added. 
@@ -302,20 +312,24 @@ class DeclarativeMixinTest(DeclarativeTestBase): id = Column(Integer, primary_key=True) class Mixin(object): + @declared_attr def target_id(cls): - return cls.__table__.c.get('target_id', - Column(Integer, ForeignKey('other.id')) - ) + return cls.__table__.c.get( + 'target_id', + Column(Integer, ForeignKey('other.id')) + ) @declared_attr def target(cls): return relationship("Other") class Engineer(Mixin, Person): + """single table inheritance""" class Manager(Mixin, Person): + """single table inheritance""" class Other(Base): @@ -338,11 +352,10 @@ class DeclarativeMixinTest(DeclarativeTestBase): Engineer(target=o1), Manager(target=o2), Manager(target=o1) - ]) + ]) session.commit() eq_(session.query(Engineer).first().target, o1) - def test_columns_joined_table_inheritance(self): """Test a column on a mixin with an alternate attribute name, mapped to a superclass and joined-table inheritance subclass. @@ -428,6 +441,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_mapper_args_declared_attr(self): class ComputedMapperArgs: + @declared_attr def __mapper_args__(cls): if cls.__name__ == 'Person': @@ -454,6 +468,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): # ComputedMapperArgs on both classes for no apparent reason. 
class ComputedMapperArgs: + @declared_attr def __mapper_args__(cls): if cls.__name__ == 'Person': @@ -612,7 +627,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): @declared_attr def __table_args__(cls): - return {'mysql_engine':'InnoDB'} + return {'mysql_engine': 'InnoDB'} @declared_attr def __mapper_args__(cls): @@ -640,13 +655,14 @@ class DeclarativeMixinTest(DeclarativeTestBase): """test the @declared_attr approach from a custom base.""" class Base(object): + @declared_attr def __tablename__(cls): return cls.__name__.lower() @declared_attr def __table_args__(cls): - return {'mysql_engine':'InnoDB'} + return {'mysql_engine': 'InnoDB'} @declared_attr def id(self): @@ -714,7 +730,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): eq_(Generic.__table__.name, 'generic') eq_(Specific.__table__.name, 'specific') eq_(list(Generic.__table__.c.keys()), ['timestamp', 'id', - 'python_type']) + 'python_type']) eq_(list(Specific.__table__.c.keys()), ['id']) eq_(Generic.__table__.kwargs, {'mysql_engine': 'InnoDB'}) eq_(Specific.__table__.kwargs, {'mysql_engine': 'InnoDB'}) @@ -749,7 +765,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): eq_(BaseType.__table__.name, 'basetype') eq_(list(BaseType.__table__.c.keys()), ['timestamp', 'type', 'id', - 'value']) + 'value']) eq_(BaseType.__table__.kwargs, {'mysql_engine': 'InnoDB'}) assert Single.__table__ is BaseType.__table__ eq_(Joined.__table__.name, 'joined') @@ -851,7 +867,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): @declared_attr def __tablename__(cls): if decl.has_inherited_table(cls) and TableNameMixin \ - not in cls.__bases__: + not in cls.__bases__: return None return cls.__name__.lower() @@ -900,9 +916,9 @@ class DeclarativeMixinTest(DeclarativeTestBase): class Model(Base, ColumnMixin): - __table__ = Table('foo', Base.metadata, Column('data', - Integer), Column('id', Integer, - primary_key=True)) + __table__ = Table('foo', Base.metadata, + Column('data', Integer), + Column('id', Integer, 
primary_key=True)) model_col = Model.__table__.c.data mixin_col = ColumnMixin.data @@ -920,8 +936,8 @@ class DeclarativeMixinTest(DeclarativeTestBase): class Model(Base, ColumnMixin): __table__ = Table('foo', Base.metadata, - Column('data',Integer), - Column('id', Integer,primary_key=True)) + Column('data', Integer), + Column('id', Integer, primary_key=True)) foo = relationship("Dest") assert_raises_message(sa.exc.ArgumentError, @@ -942,9 +958,9 @@ class DeclarativeMixinTest(DeclarativeTestBase): class Model(Base, ColumnMixin): __table__ = Table('foo', Base.metadata, - Column('data',Integer), - Column('tada', Integer), - Column('id', Integer,primary_key=True)) + Column('data', Integer), + Column('tada', Integer), + Column('id', Integer, primary_key=True)) foo = relationship("Dest") assert_raises_message(sa.exc.ArgumentError, @@ -959,9 +975,9 @@ class DeclarativeMixinTest(DeclarativeTestBase): class Model(Base, ColumnMixin): - __table__ = Table('foo', Base.metadata, Column('data', - Integer), Column('id', Integer, - primary_key=True)) + __table__ = Table('foo', Base.metadata, + Column('data', Integer), + Column('id', Integer, primary_key=True)) model_col = Model.__table__.c.data mixin_col = ColumnMixin.data @@ -987,10 +1003,11 @@ class DeclarativeMixinTest(DeclarativeTestBase): __tablename__ = 'model' eq_(list(Model.__table__.c.keys()), ['col1', 'col3', 'col2', 'col4', - 'id']) + 'id']) def test_honor_class_mro_one(self): class HasXMixin(object): + @declared_attr def x(self): return Column(Integer) @@ -1007,6 +1024,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def test_honor_class_mro_two(self): class HasXMixin(object): + @declared_attr def x(self): return Column(Integer) @@ -1014,6 +1032,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): class Parent(HasXMixin, Base): __tablename__ = 'parent' id = Column(Integer, primary_key=True) + def x(self): return "hi" @@ -1025,6 +1044,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): def 
test_arbitrary_attrs_one(self): class HasMixin(object): + @declared_attr def some_attr(cls): return cls.__name__ + "SOME ATTR" @@ -1043,8 +1063,9 @@ class DeclarativeMixinTest(DeclarativeTestBase): __tablename__ = 'filter_a' id = Column(Integer(), primary_key=True) parent_id = Column(Integer(), - ForeignKey('type_a.id')) + ForeignKey('type_a.id')) filter = Column(String()) + def __init__(self, filter_, **kw): self.filter = filter_ @@ -1052,16 +1073,18 @@ class DeclarativeMixinTest(DeclarativeTestBase): __tablename__ = 'filter_b' id = Column(Integer(), primary_key=True) parent_id = Column(Integer(), - ForeignKey('type_b.id')) + ForeignKey('type_b.id')) filter = Column(String()) + def __init__(self, filter_, **kw): self.filter = filter_ class FilterMixin(object): + @declared_attr def _filters(cls): return relationship(cls.filter_class, - cascade='all,delete,delete-orphan') + cascade='all,delete,delete-orphan') @declared_attr def filters(cls): @@ -1080,6 +1103,7 @@ class DeclarativeMixinTest(DeclarativeTestBase): TypeA(filters=['foo']) TypeB(filters=['foo']) + class DeclarativeMixinPropertyTest(DeclarativeTestBase): def test_column_property(self): @@ -1118,9 +1142,9 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase): sess.add_all([m1, m2]) sess.flush() eq_(sess.query(MyModel).filter(MyModel.prop_hoho == 'foo' - ).one(), m1) + ).one(), m1) eq_(sess.query(MyOtherModel).filter(MyOtherModel.prop_hoho - == 'bar').one(), m2) + == 'bar').one(), m2) def test_doc(self): """test documentation transfer. 
@@ -1198,7 +1222,6 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase): ModelTwo.__table__.c.version_id ) - def test_deferred(self): class MyMixin(object): @@ -1235,8 +1258,8 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase): @declared_attr def target(cls): return relationship('Target', - primaryjoin='Target.id==%s.target_id' - % cls.__name__) + primaryjoin='Target.id==%s.target_id' + % cls.__name__) else: @declared_attr @@ -1280,6 +1303,7 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase): class AbstractTest(DeclarativeTestBase): + def test_abstract_boolean(self): class A(Base): diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py index f4bda6995..c7f7bc05d 100644 --- a/test/ext/declarative/test_reflection.py +++ b/test/ext/declarative/test_reflection.py @@ -1,7 +1,7 @@ from sqlalchemy.testing import eq_, assert_raises from sqlalchemy.ext import declarative as decl from sqlalchemy import testing -from sqlalchemy import MetaData, Integer, String, ForeignKey +from sqlalchemy import Integer, String, ForeignKey from sqlalchemy.testing.schema import Table, Column from sqlalchemy.orm import relationship, create_session, \ clear_mappers, \ @@ -10,6 +10,7 @@ from sqlalchemy.testing import fixtures from sqlalchemy.testing.util import gc_collect from sqlalchemy.ext.declarative.base import _DeferredMapperConfig + class DeclarativeReflectionBase(fixtures.TablesTest): __requires__ = 'reflectable_autoincrement', @@ -21,13 +22,14 @@ class DeclarativeReflectionBase(fixtures.TablesTest): super(DeclarativeReflectionBase, self).teardown() clear_mappers() + class DeclarativeReflectionTest(DeclarativeReflectionBase): @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, - primary_key=True, test_needs_autoincrement=True), + Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), test_needs_fk=True) Table( 'addresses', @@ -37,7 +39,7 
@@ class DeclarativeReflectionTest(DeclarativeReflectionBase): Column('email', String(50)), Column('user_id', Integer, ForeignKey('users.id')), test_needs_fk=True, - ) + ) Table( 'imhandles', metadata, @@ -47,8 +49,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): Column('network', String(50)), Column('handle', String(50)), test_needs_fk=True, - ) - + ) def test_basic(self): class User(Base, fixtures.ComparableEntity): @@ -69,13 +70,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): test_needs_autoincrement=True) u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', + addresses=[Address(email='one'), Address(email='two')])]) a1 = sess.query(Address).filter(Address.email == 'two').one() eq_(a1, Address(email='two')) eq_(a1.user, User(name='u1')) @@ -100,13 +102,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): test_needs_autoincrement=True) u1 = User(nom='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(nom='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(nom='u1', + addresses=[Address(email='one'), Address(email='two')])]) a1 = sess.query(Address).filter(Address.email == 'two').one() eq_(a1, Address(email='two')) eq_(a1.user, User(nom='u1')) @@ -131,61 +134,66 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase): test_needs_autoincrement=True) handles = relationship('IMHandle', backref='user') - u1 = User(name='u1', handles=[IMHandle(network='blabber', - handle='foo'), IMHandle(network='lol', handle='zomg' - )]) + u1 = User(name='u1', handles=[ 
+ IMHandle(network='blabber', handle='foo'), + IMHandle(network='lol', handle='zomg')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - handles=[IMHandle(network='blabber', handle='foo'), - IMHandle(network='lol', handle='zomg')])]) + eq_(sess.query(User).all(), [ + User(name='u1', handles=[IMHandle(network='blabber', handle='foo'), + IMHandle(network='lol', handle='zomg')])]) a1 = sess.query(IMHandle).filter(IMHandle.handle == 'zomg' - ).one() + ).one() eq_(a1, IMHandle(network='lol', handle='zomg')) eq_(a1.user, User(name='u1')) + class DeferredReflectBase(DeclarativeReflectionBase): + def teardown(self): super(DeferredReflectBase, self).teardown() _DeferredMapperConfig._configs.clear() Base = None + class DeferredReflectPKFKTest(DeferredReflectBase): + @classmethod def define_tables(cls, metadata): Table("a", metadata, - Column('id', Integer, - primary_key=True, test_needs_autoincrement=True), - ) + Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), + ) Table("b", metadata, - Column('id', Integer, - ForeignKey('a.id'), - primary_key=True), - Column('x', Integer, primary_key=True) - ) + Column('id', Integer, + ForeignKey('a.id'), + primary_key=True), + Column('x', Integer, primary_key=True) + ) def test_pk_fk(self): class B(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'b' a = relationship("A") class A(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'a' decl.DeferredReflection.prepare(testing.db) + class DeferredReflectionTest(DeferredReflectBase): @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, - primary_key=True, test_needs_autoincrement=True), + Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), test_needs_fk=True) Table( 'addresses', @@ -195,7 +203,7 @@ class 
DeferredReflectionTest(DeferredReflectBase): Column('email', String(50)), Column('user_id', Integer, ForeignKey('users.id')), test_needs_fk=True, - ) + ) def _roundtrip(self): @@ -203,25 +211,26 @@ class DeferredReflectionTest(DeferredReflectBase): Address = Base._decl_class_registry['Address'] u1 = User(name='u1', addresses=[Address(email='one'), - Address(email='two')]) + Address(email='two')]) sess = create_session() sess.add(u1) sess.flush() sess.expunge_all() - eq_(sess.query(User).all(), [User(name='u1', - addresses=[Address(email='one'), Address(email='two')])]) + eq_(sess.query(User).all(), [ + User(name='u1', + addresses=[Address(email='one'), Address(email='two')])]) a1 = sess.query(Address).filter(Address.email == 'two').one() eq_(a1, Address(email='two')) eq_(a1.user, User(name='u1')) def test_basic_deferred(self): class User(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'users' addresses = relationship("Address", backref="user") class Address(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'addresses' decl.DeferredReflection.prepare(testing.db) @@ -249,12 +258,12 @@ class DeferredReflectionTest(DeferredReflectBase): def test_redefine_fk_double(self): class User(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'users' addresses = relationship("Address", backref="user") class Address(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'addresses' user_id = Column(Integer, ForeignKey('users.id')) @@ -262,10 +271,11 @@ class DeferredReflectionTest(DeferredReflectBase): self._roundtrip() def test_mapper_args_deferred(self): - """test that __mapper_args__ is not called until *after* table reflection""" + """test that __mapper_args__ is not called until *after* + table reflection""" class User(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'users' @decl.declared_attr @@ 
-296,10 +306,11 @@ class DeferredReflectionTest(DeferredReflectBase): @testing.requires.predictable_gc def test_cls_not_strong_ref(self): class User(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'users' + class Address(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'addresses' eq_(len(_DeferredMapperConfig._configs), 2) del Address @@ -308,26 +319,28 @@ class DeferredReflectionTest(DeferredReflectBase): decl.DeferredReflection.prepare(testing.db) assert not _DeferredMapperConfig._configs + class DeferredSecondaryReflectionTest(DeferredReflectBase): + @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, - primary_key=True, test_needs_autoincrement=True), + Column('id', Integer, + primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), test_needs_fk=True) Table('user_items', metadata, - Column('user_id', ForeignKey('users.id'), primary_key=True), - Column('item_id', ForeignKey('items.id'), primary_key=True), - test_needs_fk=True - ) + Column('user_id', ForeignKey('users.id'), primary_key=True), + Column('item_id', ForeignKey('items.id'), primary_key=True), + test_needs_fk=True + ) Table('items', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('name', String(50)), - test_needs_fk=True - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(50)), + test_needs_fk=True + ) def _roundtrip(self): @@ -340,8 +353,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase): sess.add(u1) sess.commit() - eq_(sess.query(User).all(), [User(name='u1', - items=[Item(name='i1'), Item(name='i2')])]) + eq_(sess.query(User).all(), [ + User(name='u1', items=[Item(name='i1'), Item(name='i2')])]) def test_string_resolution(self): class User(decl.DeferredReflection, fixtures.ComparableEntity, Base): @@ -359,7 +372,8 @@ class 
DeferredSecondaryReflectionTest(DeferredReflectBase): class User(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'users' - items = relationship("Item", secondary=Table("user_items", Base.metadata)) + items = relationship("Item", + secondary=Table("user_items", Base.metadata)) class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base): __tablename__ = 'items' @@ -367,7 +381,9 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase): decl.DeferredReflection.prepare(testing.db) self._roundtrip() + class DeferredInhReflectBase(DeferredReflectBase): + def _roundtrip(self): Foo = Base._decl_class_registry['Foo'] Bar = Base._decl_class_registry['Bar'] @@ -392,24 +408,25 @@ class DeferredInhReflectBase(DeferredReflectBase): ] ) + class DeferredSingleInhReflectionTest(DeferredInhReflectBase): @classmethod def define_tables(cls, metadata): Table("foo", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('type', String(32)), - Column('data', String(30)), - Column('bar_data', String(30)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('type', String(32)), + Column('data', String(30)), + Column('bar_data', String(30)) + ) def test_basic(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} class Bar(Foo): __mapper_args__ = {"polymorphic_identity": "bar"} @@ -419,10 +436,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase): def test_add_subclass_column(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} class Bar(Foo): __mapper_args__ = {"polymorphic_identity": "bar"} @@ -433,10 +450,10 @@ class 
DeferredSingleInhReflectionTest(DeferredInhReflectBase): def test_add_pk_column(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} id = Column(Integer, primary_key=True) class Bar(Foo): @@ -445,28 +462,30 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase): decl.DeferredReflection.prepare(testing.db) self._roundtrip() + class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): + @classmethod def define_tables(cls, metadata): Table("foo", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('type', String(32)), - Column('data', String(30)), - test_needs_fk=True, - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('type', String(32)), + Column('data', String(30)), + test_needs_fk=True, + ) Table('bar', metadata, - Column('id', Integer, ForeignKey('foo.id'), primary_key=True), - Column('bar_data', String(30)), - test_needs_fk=True, - ) + Column('id', Integer, ForeignKey('foo.id'), primary_key=True), + Column('bar_data', String(30)), + test_needs_fk=True, + ) def test_basic(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' @@ -477,10 +496,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): def test_add_subclass_column(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' @@ -492,10 +511,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): def test_add_pk_column(self): 
class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} id = Column(Integer, primary_key=True) class Bar(Foo): @@ -507,10 +526,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase): def test_add_fk_pk_column(self): class Foo(decl.DeferredReflection, fixtures.ComparableEntity, - Base): + Base): __tablename__ = 'foo' __mapper_args__ = {"polymorphic_on": "type", - "polymorphic_identity": "foo"} + "polymorphic_identity": "foo"} class Bar(Foo): __tablename__ = 'bar' -- cgit v1.2.1 From d5ecd473aeacbca7faada47b089c283d03a78fe7 Mon Sep 17 00:00:00 2001 From: ndparker Date: Tue, 23 Sep 2014 22:52:14 +0200 Subject: allow Table.tometadata changing the table name --- lib/sqlalchemy/sql/schema.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d9fd37f92..094c40947 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -728,7 +728,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause): checkfirst=checkfirst) def tometadata(self, metadata, schema=RETAIN_SCHEMA, - referred_schema_fn=None): + referred_schema_fn=None, name=None): """Return a copy of this :class:`.Table` associated with a different :class:`.MetaData`. @@ -785,13 +785,16 @@ class Table(DialectKWArgs, SchemaItem, TableClause): .. versionadded:: 0.9.2 + :param name: optional string name indicating the target table name. + If not specified or None, the table name is retained. """ - + if name is None: + name = self.name if schema is RETAIN_SCHEMA: schema = self.schema elif schema is None: schema = metadata.schema - key = _get_table_key(self.name, schema) + key = _get_table_key(name, schema) if key in metadata.tables: util.warn("Table '%s' already exists within the given " "MetaData - not copying." 
% self.description) @@ -801,7 +804,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause): for c in self.columns: args.append(c.copy(schema=schema)) table = Table( - self.name, metadata, schema=schema, + name, metadata, schema=schema, *args, **self.kwargs ) for c in self.constraints: -- cgit v1.2.1 From ce52dd9e3b71f2074d7821fe62803d4e0eefe512 Mon Sep 17 00:00:00 2001 From: ndparker Date: Tue, 23 Sep 2014 23:28:11 +0200 Subject: improve exception vs. exit handling --- lib/sqlalchemy/dialects/mssql/base.py | 2 ++ lib/sqlalchemy/dialects/mysql/base.py | 4 ++++ lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 2 ++ lib/sqlalchemy/engine/base.py | 2 ++ lib/sqlalchemy/orm/mapper.py | 2 ++ lib/sqlalchemy/orm/state.py | 4 ++-- lib/sqlalchemy/pool.py | 8 ++++---- lib/sqlalchemy/sql/elements.py | 2 ++ lib/sqlalchemy/sql/schema.py | 4 ++-- lib/sqlalchemy/testing/provision.py | 10 ++++++++++ lib/sqlalchemy/util/langhelpers.py | 6 ++++++ 11 files changed, 38 insertions(+), 8 deletions(-) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index ba3050ae5..ade2d00cb 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -846,6 +846,8 @@ class MSExecutionContext(default.DefaultExecutionContext): "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer. format_table( self.compiled.statement.table))) + except (SystemExit, KeyboardInterrupt): + raise except: pass diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 7ccd59abb..0994e2416 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2317,6 +2317,8 @@ class MySQLDialect(default.DefaultDialect): # basic operations via autocommit fail. 
try: dbapi_connection.commit() + except (SystemExit, KeyboardInterrupt): + raise except: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args @@ -2329,6 +2331,8 @@ class MySQLDialect(default.DefaultDialect): try: dbapi_connection.rollback() + except (SystemExit, KeyboardInterrupt): + raise except: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index e51e80005..afa61d85b 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -103,6 +103,8 @@ class MySQLDialect_mysqlconnector(MySQLDialect): 'client_flags', ClientFlag.get_default()) client_flags |= ClientFlag.FOUND_ROWS opts['client_flags'] = client_flags + except (SystemExit, KeyboardInterrupt): + raise except: pass return [[], opts] diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index d2cc8890f..b3460c240 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1135,6 +1135,8 @@ class Connection(Connectable): per_fn = fn(ctx) if per_fn is not None: ctx.chained_exception = newraise = per_fn + except (SystemExit, KeyboardInterrupt): + raise except Exception as _raised: # handler raises an exception - stop processing newraise = _raised diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index a59a38a5b..bd28975dd 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -2649,6 +2649,8 @@ def configure_mappers(): mapper._expire_memoizations() mapper.dispatch.mapper_configured( mapper, mapper.class_) + except (SystemExit, KeyboardInterrupt): + raise except: exc = sys.exc_info()[1] if not hasattr(exc, '_configure_failed'): diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 3c12fda1a..4756f1707 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -258,8 +258,8 
@@ class InstanceState(interfaces.InspectionAttr): try: return manager.original_init(*mixed[1:], **kwargs) except: - manager.dispatch.init_failure(self, args, kwargs) - raise + with util.safe_reraise(): + manager.dispatch.init_failure(self, args, kwargs) def get_history(self, key, passive): return self.manager[key].impl.get_history(self, self.dict, passive) diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py index bc9affe4a..0c162e984 100644 --- a/lib/sqlalchemy/pool.py +++ b/lib/sqlalchemy/pool.py @@ -441,8 +441,8 @@ class _ConnectionRecord(object): try: dbapi_connection = rec.get_connection() except: - rec.checkin() - raise + with util.safe_reraise(): + rec.checkin() echo = pool._should_log_debug() fairy = _ConnectionFairy(dbapi_connection, rec, echo) rec.fairy_ref = weakref.ref( @@ -962,8 +962,8 @@ class QueuePool(Pool): try: return self._create_connection() except: - self._dec_overflow() - raise + with util.safe_reraise(): + self._dec_overflow() else: return self._do_get() diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 8ec0aa700..8e18a22fe 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3491,6 +3491,8 @@ def _string_or_unprintable(element): else: try: return str(element) + except (SystemExit, KeyboardInterrupt): + raise except: return "unprintable element %r" % element diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d9fd37f92..9afc31be8 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -412,8 +412,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause): table.dispatch.after_parent_attach(table, metadata) return table except: - metadata._remove_table(name, schema) - raise + with util.safe_reraise(): + metadata._remove_table(name, schema) @property @util.deprecated('0.9', 'Use ``table.schema.quote``') diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index 0bcdad959..64688d6b5 100644 
--- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -120,6 +120,8 @@ def _pg_create_db(cfg, eng, ident): isolation_level="AUTOCOMMIT") as conn: try: _pg_drop_db(cfg, conn, ident) + except (SystemExit, KeyboardInterrupt): + raise except: pass currentdb = conn.scalar("select current_database()") @@ -131,6 +133,8 @@ def _mysql_create_db(cfg, eng, ident): with eng.connect() as conn: try: _mysql_drop_db(cfg, conn, ident) + except (SystemExit, KeyboardInterrupt): + raise except: pass conn.execute("CREATE DATABASE %s" % ident) @@ -173,14 +177,20 @@ def _mysql_drop_db(cfg, eng, ident): with eng.connect() as conn: try: conn.execute("DROP DATABASE %s_test_schema" % ident) + except (SystemExit, KeyboardInterrupt): + raise except: pass try: conn.execute("DROP DATABASE %s_test_schema_2" % ident) + except (SystemExit, KeyboardInterrupt): + raise except: pass try: conn.execute("DROP DATABASE %s" % ident) + except (SystemExit, KeyboardInterrupt): + raise except: pass diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 76f85f605..75c6e7b46 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -490,6 +490,8 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): val = getattr(obj, arg, missing) if val is not missing and val != defval: output.append('%s=%r' % (arg, val)) + except (SystemExit, KeyboardInterrupt): + raise except: pass @@ -499,6 +501,8 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): val = getattr(obj, arg, missing) if val is not missing and val != defval: output.append('%s=%r' % (arg, val)) + except (SystemExit, KeyboardInterrupt): + raise except: pass @@ -1185,6 +1189,8 @@ def warn_exception(func, *args, **kwargs): """ try: return func(*args, **kwargs) + except (SystemExit, KeyboardInterrupt): + raise except: warn("%s('%s') ignored" % sys.exc_info()[0:2]) -- cgit v1.2.1 From 
42837f4bca6a0b2fad05faade7837719f872c35d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 24 Sep 2014 14:49:30 -0400 Subject: - clarify documentation on exists() that it is preferred to be in the WHERE clause. fixes #3212 --- lib/sqlalchemy/orm/query.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index e6b2bf537..7b2ea7977 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -2616,6 +2616,19 @@ class Query(object): SELECT 1 FROM users WHERE users.name = :name_1 ) AS anon_1 + The EXISTS construct is usually used in the WHERE clause:: + + session.query(User.id).filter(q.exists()).scalar() + + Note that some databases such as SQL Server don't allow an + EXISTS expression to be present in the columns clause of a + SELECT. To select a simple boolean value based on the exists + as a WHERE, use :func:`.literal`:: + + from sqlalchemy import literal + + session.query(literal(True)).filter(q.exists()).scalar() + .. versionadded:: 0.8.1 """ -- cgit v1.2.1 From 5e7cb037e85acfbd8f064f7d4defb7ae07d0aff6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 25 Sep 2014 21:07:15 -0400 Subject: - convert to spaces --- doc/build/changelog/migration_10.rst | 382 +++++++++++++++++------------------ 1 file changed, 191 insertions(+), 191 deletions(-) diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index de9e9a64c..b489dc2df 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -37,7 +37,7 @@ any SQL expression, in addition to integer values, as arguments. 
The ORM this is used to allow a bound parameter to be passed, which can be substituted with a value later:: - sel = select([table]).limit(bindparam('mylimit')).offset(bindparam('myoffset')) + sel = select([table]).limit(bindparam('mylimit')).offset(bindparam('myoffset')) Dialects which don't support non-integer LIMIT or OFFSET expressions may continue to not support this behavior; third party dialects may also need modification @@ -82,35 +82,35 @@ that a raw load of rows now populates ORM-based objects around 25% faster. Assuming a 1M row table, a script like the following illustrates the type of load that's improved the most:: - import time - from sqlalchemy import Integer, Column, create_engine, Table - from sqlalchemy.orm import Session - from sqlalchemy.ext.declarative import declarative_base + import time + from sqlalchemy import Integer, Column, create_engine, Table + from sqlalchemy.orm import Session + from sqlalchemy.ext.declarative import declarative_base - Base = declarative_base() + Base = declarative_base() - class Foo(Base): - __table__ = Table( - 'foo', Base.metadata, - Column('id', Integer, primary_key=True), - Column('a', Integer(), nullable=False), - Column('b', Integer(), nullable=False), - Column('c', Integer(), nullable=False), - ) + class Foo(Base): + __table__ = Table( + 'foo', Base.metadata, + Column('id', Integer, primary_key=True), + Column('a', Integer(), nullable=False), + Column('b', Integer(), nullable=False), + Column('c', Integer(), nullable=False), + ) - engine = create_engine( - 'mysql+mysqldb://scott:tiger@localhost/test', echo=True) + engine = create_engine( + 'mysql+mysqldb://scott:tiger@localhost/test', echo=True) - sess = Session(engine) + sess = Session(engine) - now = time.time() + now = time.time() - # avoid using all() so that we don't have the overhead of building - # a large list of full objects in memory - for obj in sess.query(Foo).yield_per(100).limit(1000000): - pass + # avoid using all() so that we don't have the 
overhead of building + # a large list of full objects in memory + for obj in sess.query(Foo).yield_per(100).limit(1000000): + pass - print("Total time: %d" % (time.time() - now)) + print("Total time: %d" % (time.time() - now)) Local MacBookPro results bench from 19 seconds for 0.9 down to 14 seconds for 1.0. The :meth:`.Query.yield_per` call is always a good idea when batching @@ -130,7 +130,7 @@ New KeyedTuple implementation dramatically faster We took a look into the :class:`.KeyedTuple` implementation in the hopes of improving queries like this:: - rows = sess.query(Foo.a, Foo.b, Foo.c).all() + rows = sess.query(Foo.a, Foo.b, Foo.c).all() The :class:`.KeyedTuple` class is used rather than Python's ``collections.namedtuple()``, because the latter has a very complex @@ -146,26 +146,26 @@ which scenario. In the "sweet spot", where we are both creating a good number of new types as well as fetching a good number of rows, the lightweight object totally smokes both namedtuple and KeyedTuple:: - ----------------- - size=10 num=10000 # few rows, lots of queries - namedtuple: 3.60302400589 # namedtuple falls over - keyedtuple: 0.255059957504 # KeyedTuple very fast - lw keyed tuple: 0.582715034485 # lw keyed trails right on KeyedTuple - ----------------- - size=100 num=1000 # <--- sweet spot - namedtuple: 0.365247011185 - keyedtuple: 0.24896979332 - lw keyed tuple: 0.0889317989349 # lw keyed blows both away! 
- ----------------- - size=10000 num=100 - namedtuple: 0.572599887848 - keyedtuple: 2.54251694679 - lw keyed tuple: 0.613876104355 - ----------------- - size=1000000 num=10 # few queries, lots of rows - namedtuple: 5.79669594765 # namedtuple very fast - keyedtuple: 28.856498003 # KeyedTuple falls over - lw keyed tuple: 6.74346804619 # lw keyed trails right on namedtuple + ----------------- + size=10 num=10000 # few rows, lots of queries + namedtuple: 3.60302400589 # namedtuple falls over + keyedtuple: 0.255059957504 # KeyedTuple very fast + lw keyed tuple: 0.582715034485 # lw keyed trails right on KeyedTuple + ----------------- + size=100 num=1000 # <--- sweet spot + namedtuple: 0.365247011185 + keyedtuple: 0.24896979332 + lw keyed tuple: 0.0889317989349 # lw keyed blows both away! + ----------------- + size=10000 num=100 + namedtuple: 0.572599887848 + keyedtuple: 2.54251694679 + lw keyed tuple: 0.613876104355 + ----------------- + size=1000000 num=10 # few queries, lots of rows + namedtuple: 5.79669594765 # namedtuple very fast + keyedtuple: 28.856498003 # KeyedTuple falls over + lw keyed tuple: 6.74346804619 # lw keyed trails right on namedtuple :ticket:`3176` @@ -195,27 +195,27 @@ them as duplicates. To illustrate, the following test script will show only ten warnings being emitted for ten of the parameter sets, out of a total of 1000:: - from sqlalchemy import create_engine, Unicode, select, cast - import random - import warnings + from sqlalchemy import create_engine, Unicode, select, cast + import random + import warnings - e = create_engine("sqlite://") + e = create_engine("sqlite://") - # Use the "once" filter (which is also the default for Python - # warnings). Exactly ten of these warnings will - # be emitted; beyond that, the Python warnings registry will accumulate - # new values as dupes of one of the ten existing. - warnings.filterwarnings("once") + # Use the "once" filter (which is also the default for Python + # warnings). 
Exactly ten of these warnings will + # be emitted; beyond that, the Python warnings registry will accumulate + # new values as dupes of one of the ten existing. + warnings.filterwarnings("once") - for i in range(1000): - e.execute(select([cast( - ('foo_%d' % random.randint(0, 1000000)).encode('ascii'), Unicode)])) + for i in range(1000): + e.execute(select([cast( + ('foo_%d' % random.randint(0, 1000000)).encode('ascii'), Unicode)])) The format of the warning here is:: - /path/lib/sqlalchemy/sql/sqltypes.py:186: SAWarning: Unicode type received - non-unicode bind param value 'foo_4852'. (this warning may be - suppressed after 10 occurrences) + /path/lib/sqlalchemy/sql/sqltypes.py:186: SAWarning: Unicode type received + non-unicode bind param value 'foo_4852'. (this warning may be + suppressed after 10 occurrences) :ticket:`3178` @@ -233,15 +233,15 @@ However, as these objects are class-bound descriptors, they must be accessed at the attribute. Below this is illustared using the :attr:`.Mapper.all_orm_descriptors` namespace:: - class SomeObject(Base): - # ... + class SomeObject(Base): + # ... - @hybrid_property - def some_prop(self): - return self.value + 5 + @hybrid_property + def some_prop(self): + return self.value + 5 - inspect(SomeObject).all_orm_descriptors.some_prop.info['foo'] = 'bar' + inspect(SomeObject).all_orm_descriptors.some_prop.info['foo'] = 'bar' It is also available as a constructor argument for all :class:`.SchemaItem` objects (e.g. :class:`.ForeignKey`, :class:`.UniqueConstraint` etc.) 
as well @@ -258,26 +258,26 @@ Change to single-table-inheritance criteria when using from_self(), count() Given a single-table inheritance mapping, such as:: - class Widget(Base): - __table__ = 'widget_table' + class Widget(Base): + __table__ = 'widget_table' - class FooWidget(Widget): - pass + class FooWidget(Widget): + pass Using :meth:`.Query.from_self` or :meth:`.Query.count` against a subclass would produce a subquery, but then add the "WHERE" criteria for subtypes to the outside:: - sess.query(FooWidget).from_self().all() + sess.query(FooWidget).from_self().all() rendering:: - SELECT - anon_1.widgets_id AS anon_1_widgets_id, - anon_1.widgets_type AS anon_1_widgets_type - FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type, - FROM widgets) AS anon_1 - WHERE anon_1.widgets_type IN (?) + SELECT + anon_1.widgets_id AS anon_1_widgets_id, + anon_1.widgets_type AS anon_1_widgets_type + FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type, + FROM widgets) AS anon_1 + WHERE anon_1.widgets_type IN (?) The issue with this is that if the inner query does not specify all columns, then we can't add the WHERE clause on the outside (it actually tries, @@ -286,23 +286,23 @@ apparently goes way back to 0.6.5 with the note "may need to make more adjustments to this". Well, those adjustments have arrived! 
So now the above query will render:: - SELECT - anon_1.widgets_id AS anon_1_widgets_id, - anon_1.widgets_type AS anon_1_widgets_type - FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type, - FROM widgets - WHERE widgets.type IN (?)) AS anon_1 + SELECT + anon_1.widgets_id AS anon_1_widgets_id, + anon_1.widgets_type AS anon_1_widgets_type + FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type, + FROM widgets + WHERE widgets.type IN (?)) AS anon_1 So that queries that don't include "type" will still work!:: - sess.query(FooWidget.id).count() + sess.query(FooWidget.id).count() Renders:: - SELECT count(*) AS count_1 - FROM (SELECT widgets.id AS widgets_id - FROM widgets - WHERE widgets.type IN (?)) AS anon_1 + SELECT count(*) AS count_1 + FROM (SELECT widgets.id AS widgets_id + FROM widgets + WHERE widgets.type IN (?)) AS anon_1 :ticket:`3177` @@ -319,67 +319,67 @@ as the "order by label" logic introduced in 0.9 (see :ref:`migration_1068`). Given a mapping like the following:: - class A(Base): - __tablename__ = 'a' + class A(Base): + __tablename__ = 'a' - id = Column(Integer, primary_key=True) + id = Column(Integer, primary_key=True) - class B(Base): - __tablename__ = 'b' + class B(Base): + __tablename__ = 'b' - id = Column(Integer, primary_key=True) - a_id = Column(ForeignKey('a.id')) + id = Column(Integer, primary_key=True) + a_id = Column(ForeignKey('a.id')) - A.b = column_property( - select([func.max(B.id)]).where(B.a_id == A.id).correlate(A) - ) + A.b = column_property( + select([func.max(B.id)]).where(B.a_id == A.id).correlate(A) + ) A simple scenario that included "A.b" twice would fail to render correctly:: - print sess.query(A, a1).order_by(a1.b) + print sess.query(A, a1).order_by(a1.b) This would order by the wrong column:: - SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 FROM b - WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id, - (SELECT max(b.id) AS max_2 - FROM b WHERE b.a_id = a_1.id) AS anon_2 - FROM a, a AS a_1 ORDER BY 
anon_1 + SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 FROM b + WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id, + (SELECT max(b.id) AS max_2 + FROM b WHERE b.a_id = a_1.id) AS anon_2 + FROM a, a AS a_1 ORDER BY anon_1 New output:: - SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 - FROM b WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id, - (SELECT max(b.id) AS max_2 - FROM b WHERE b.a_id = a_1.id) AS anon_2 - FROM a, a AS a_1 ORDER BY anon_2 + SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 + FROM b WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id, + (SELECT max(b.id) AS max_2 + FROM b WHERE b.a_id = a_1.id) AS anon_2 + FROM a, a AS a_1 ORDER BY anon_2 There were also many scenarios where the "order by" logic would fail to order by label, for example if the mapping were "polymorphic":: - class A(Base): - __tablename__ = 'a' + class A(Base): + __tablename__ = 'a' - id = Column(Integer, primary_key=True) - type = Column(String) + id = Column(Integer, primary_key=True) + type = Column(String) - __mapper_args__ = {'polymorphic_on': type, 'with_polymorphic': '*'} + __mapper_args__ = {'polymorphic_on': type, 'with_polymorphic': '*'} The order_by would fail to use the label, as it would be anonymized due to the polymorphic loading:: - SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1 - FROM b WHERE b.a_id = a.id) AS anon_1 - FROM a ORDER BY (SELECT max(b.id) AS max_2 - FROM b WHERE b.a_id = a.id) + SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1 + FROM b WHERE b.a_id = a.id) AS anon_1 + FROM a ORDER BY (SELECT max(b.id) AS max_2 + FROM b WHERE b.a_id = a.id) Now that the order by label tracks the anonymized label, this now works:: - SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1 - FROM b WHERE b.a_id = a.id) AS anon_1 - FROM a ORDER BY anon_1 + SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1 + FROM b WHERE b.a_id = a.id) AS anon_1 + FROM a ORDER BY anon_1 Included in these fixes are a variety of 
heisenbugs that could corrupt the state of an ``aliased()`` construct such that the labeling logic @@ -406,13 +406,13 @@ for :func:`.attributes.get_history` and related functions. Given an object with no state:: - >>> obj = Foo() + >>> obj = Foo() It has always been SQLAlchemy's behavior such that if we access a scalar or many-to-one attribute that was never set, it is returned as ``None``:: - >>> obj.someattr - None + >>> obj.someattr + None This value of ``None`` is in fact now part of the state of ``obj``, and is not unlike as though we had set the attribute explicitly, e.g. @@ -420,31 +420,31 @@ not unlike as though we had set the attribute explicitly, e.g. differently as far as history and events. It would not emit any attribute event, and additionally if we view history, we see this:: - >>> inspect(obj).attrs.someattr.history - History(added=(), unchanged=[None], deleted=()) # 0.9 and below + >>> inspect(obj).attrs.someattr.history + History(added=(), unchanged=[None], deleted=()) # 0.9 and below That is, it's as though the attribute were always ``None`` and were never changed. This is explicitly different from if we had set the attribute first instead:: - >>> obj = Foo() - >>> obj.someattr = None - >>> inspect(obj).attrs.someattr.history - History(added=[None], unchanged=(), deleted=()) # all versions + >>> obj = Foo() + >>> obj.someattr = None + >>> inspect(obj).attrs.someattr.history + History(added=[None], unchanged=(), deleted=()) # all versions The above means that the behavior of our "set" operation can be corrupted by the fact that the value was accessed via "get" earlier. In 1.0, this inconsistency has been resolved, by no longer actually setting anything when the default "getter" is used. 
- >>> obj = Foo() - >>> obj.someattr - None - >>> inspect(obj).attrs.someattr.history - History(added=(), unchanged=(), deleted=()) # 1.0 - >>> obj.someattr = None - >>> inspect(obj).attrs.someattr.history - History(added=[None], unchanged=(), deleted=()) + >>> obj = Foo() + >>> obj.someattr + None + >>> inspect(obj).attrs.someattr.history + History(added=(), unchanged=(), deleted=()) # 1.0 + >>> obj.someattr = None + >>> inspect(obj).attrs.someattr.history + History(added=[None], unchanged=(), deleted=()) The reason the above behavior hasn't had much impact is because the INSERT statement in relational databases considers a missing value to be @@ -482,17 +482,17 @@ with yield-per (subquery loading could be in theory, however). When this error is raised, the :func:`.lazyload` option can be sent with an asterisk:: - q = sess.query(Object).options(lazyload('*')).yield_per(100) + q = sess.query(Object).options(lazyload('*')).yield_per(100) or use :meth:`.Query.enable_eagerloads`:: - q = sess.query(Object).enable_eagerloads(False).yield_per(100) + q = sess.query(Object).enable_eagerloads(False).yield_per(100) The :func:`.lazyload` option has the advantage that additional many-to-one joined loader options can still be used:: - q = sess.query(Object).options( - lazyload('*'), joinedload("some_manytoone")).yield_per(100) + q = sess.query(Object).options( + lazyload('*'), joinedload("some_manytoone")).yield_per(100) .. _migration_migration_deprecated_orm_events: @@ -546,7 +546,7 @@ The unused ``result`` member is now removed:: .. seealso:: - :ref:`bundles` + :ref:`bundles` .. _migration_3008: @@ -565,12 +565,12 @@ As introduced in :ref:`feature_2976` from version 0.9, the behavior of join eager load will use a right-nested join. 
``"nested"`` is now implied when using ``innerjoin=True``:: - query(User).options( - joinedload("orders", innerjoin=False).joinedload("items", innerjoin=True)) + query(User).options( + joinedload("orders", innerjoin=False).joinedload("items", innerjoin=True)) With the new default, this will render the FROM clause in the form:: - FROM users LEFT OUTER JOIN (orders JOIN items ON ) ON + FROM users LEFT OUTER JOIN (orders JOIN items ON ) ON That is, using a right-nested join for the INNER join so that the full result of ``users`` can be returned. The use of an INNER join is more efficient @@ -579,13 +579,13 @@ optimization parameter to take effect in all cases. To get the older behavior, use ``innerjoin="unnested"``:: - query(User).options( - joinedload("orders", innerjoin=False).joinedload("items", innerjoin="unnested")) + query(User).options( + joinedload("orders", innerjoin=False).joinedload("items", innerjoin="unnested")) This will avoid right-nested joins and chain the joins together using all OUTER joins despite the innerjoin directive:: - FROM users LEFT OUTER JOIN orders ON LEFT OUTER JOIN items ON + FROM users LEFT OUTER JOIN orders ON LEFT OUTER JOIN items ON As noted in the 0.9 notes, the only database backend that has difficulty with right-nested joins is SQLite; SQLAlchemy as of 0.9 converts a right-nested @@ -593,7 +593,7 @@ join into a subquery as a join target on SQLite. .. seealso:: - :ref:`feature_2976` - description of the feature as introduced in 0.9.4. + :ref:`feature_2976` - description of the feature as introduced in 0.9.4. 
:ticket:`3008` @@ -638,15 +638,15 @@ with SQL expressions into many functions, such as :meth:`.Select.where`, Note that by "SQL expressions" we mean a **full fragment of a SQL string**, such as:: - # the argument sent to where() is a full SQL expression - stmt = select([sometable]).where("somecolumn = 'value'") + # the argument sent to where() is a full SQL expression + stmt = select([sometable]).where("somecolumn = 'value'") and we are **not talking about string arguments**, that is, the normal behavior of passing string values that become parameterized:: - # This is a normal Core expression with a string argument - - # we aren't talking about this!! - stmt = select([sometable]).where(sometable.c.somecolumn == 'value') + # This is a normal Core expression with a string argument - + # we aren't talking about this!! + stmt = select([sometable]).where(sometable.c.somecolumn == 'value') The Core tutorial has long featured an example of the use of this technique, using a :func:`.select` construct where virtually all components of it @@ -660,25 +660,25 @@ So the change here is to encourage the user to qualify textual strings when composing SQL that is partially or fully composed from textual fragments. When composing a select as below:: - stmt = select(["a", "b"]).where("a = b").select_from("sometable") + stmt = select(["a", "b"]).where("a = b").select_from("sometable") The statement is built up normally, with all the same coercions as before. 
However, one will see the following warnings emitted:: - SAWarning: Textual column expression 'a' should be explicitly declared - with text('a'), or use column('a') for more specificity - (this warning may be suppressed after 10 occurrences) + SAWarning: Textual column expression 'a' should be explicitly declared + with text('a'), or use column('a') for more specificity + (this warning may be suppressed after 10 occurrences) - SAWarning: Textual column expression 'b' should be explicitly declared - with text('b'), or use column('b') for more specificity - (this warning may be suppressed after 10 occurrences) + SAWarning: Textual column expression 'b' should be explicitly declared + with text('b'), or use column('b') for more specificity + (this warning may be suppressed after 10 occurrences) - SAWarning: Textual SQL expression 'a = b' should be explicitly declared - as text('a = b') (this warning may be suppressed after 10 occurrences) + SAWarning: Textual SQL expression 'a = b' should be explicitly declared + as text('a = b') (this warning may be suppressed after 10 occurrences) - SAWarning: Textual SQL FROM expression 'sometable' should be explicitly - declared as text('sometable'), or use table('sometable') for more - specificity (this warning may be suppressed after 10 occurrences) + SAWarning: Textual SQL FROM expression 'sometable' should be explicitly + declared as text('sometable'), or use table('sometable') for more + specificity (this warning may be suppressed after 10 occurrences) These warnings attempt to show exactly where the issue is by displaying the parameters as well as where the string was received. 
@@ -688,14 +688,14 @@ one wishes the warnings to be exceptions, the `Python Warnings Filter `_ should be used:: - import warnings - warnings.simplefilter("error") # all warnings raise an exception + import warnings + warnings.simplefilter("error") # all warnings raise an exception Given the above warnings, our statement works just fine, but to get rid of the warnings we would rewrite our statement as follows:: - from sqlalchemy import select, text - stmt = select([ + from sqlalchemy import select, text + stmt = select([ text("a"), text("b") ]).where(text("a = b")).select_from(text("sometable")) @@ -703,10 +703,10 @@ to get rid of the warnings we would rewrite our statement as follows:: and as the warnings suggest, we can give our statement more specificity about the text if we use :func:`.column` and :func:`.table`:: - from sqlalchemy import select, text, column, table + from sqlalchemy import select, text, column, table - stmt = select([column("a"), column("b")]).\ - where(text("a = b")).select_from(table("sometable")) + stmt = select([column("a"), column("b")]).\ + where(text("a = b")).select_from(table("sometable")) Where note also that :func:`.table` and :func:`.column` can now be imported from "sqlalchemy" without the "sql" part. @@ -723,10 +723,10 @@ of this change we have enhanced its functionality. When we have a :func:`.select` or :class:`.Query` that refers to some column name or named label, we might want to GROUP BY and/or ORDER BY known columns or labels:: - stmt = select([ - user.c.name, - func.count(user.c.id).label("id_count") - ]).group_by("name").order_by("id_count") + stmt = select([ + user.c.name, + func.count(user.c.id).label("id_count") + ]).group_by("name").order_by("id_count") In the above statement we expect to see "ORDER BY id_count", as opposed to a re-statement of the function. 
The string argument given is actively @@ -734,24 +734,24 @@ matched to an entry in the columns clause during compilation, so the above statement would produce as we expect, without warnings (though note that the ``"name"`` expression has been resolved to ``users.name``!):: - SELECT users.name, count(users.id) AS id_count - FROM users GROUP BY users.name ORDER BY id_count + SELECT users.name, count(users.id) AS id_count + FROM users GROUP BY users.name ORDER BY id_count However, if we refer to a name that cannot be located, then we get the warning again, as below:: - stmt = select([ + stmt = select([ user.c.name, func.count(user.c.id).label("id_count") ]).order_by("some_label") The output does what we say, but again it warns us:: - SAWarning: Can't resolve label reference 'some_label'; converting to - text() (this warning may be suppressed after 10 occurrences) + SAWarning: Can't resolve label reference 'some_label'; converting to + text() (this warning may be suppressed after 10 occurrences) - SELECT users.name, count(users.id) AS id_count - FROM users ORDER BY some_label + SELECT users.name, count(users.id) AS id_count + FROM users ORDER BY some_label The above behavior applies to all those places where we might want to refer to a so-called "label reference"; ORDER BY and GROUP BY, but also within an @@ -761,7 +761,7 @@ Postgresql syntax). We can still specify any arbitrary expression for ORDER BY or others using :func:`.text`:: - stmt = select([users]).order_by(text("some special expression")) + stmt = select([users]).order_by(text("some special expression")) The upshot of the whole change is that SQLAlchemy now would like us to tell it when a string is sent that this string is explicitly @@ -822,7 +822,7 @@ data is needed. 
A :class:`.Table` can be set up for reflection by passing :paramref:`.Table.autoload_with` alone:: - my_table = Table('my_table', metadata, autoload_with=some_engine) + my_table = Table('my_table', metadata, autoload_with=some_engine) :ticket:`3027` @@ -855,15 +855,15 @@ The :func:`.inspect` method returns a :class:`.PGInspector` object in the case of Postgresql, which includes a new :meth:`.PGInspector.get_enums` method that returns information on all available ``ENUM`` types:: - from sqlalchemy import inspect, create_engine + from sqlalchemy import inspect, create_engine - engine = create_engine("postgresql+psycopg2://host/dbname") - insp = inspect(engine) - print(insp.get_enums()) + engine = create_engine("postgresql+psycopg2://host/dbname") + insp = inspect(engine) + print(insp.get_enums()) .. seealso:: - :meth:`.PGInspector.get_enums` + :meth:`.PGInspector.get_enums` .. _feature_2891: -- cgit v1.2.1 From 7f82c55fa764b031110309fb3a819e4b518e741d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 25 Sep 2014 21:08:17 -0400 Subject: - refactor of declarative, break up into indiviudal methods that are now affixed to _MapperConfig - declarative now creates column copies ahead of time so that they are ready to go for a declared_attr - overhaul of declared_attr; memoization, cascading modifier - A relationship set up with :class:`.declared_attr` on a :class:`.AbstractConcreteBase` base class will now be configured on the abstract base mapping automatically, in addition to being set up on descendant concrete classes as usual. fixes #2670 - The :class:`.declared_attr` construct has newly improved behaviors and features in conjunction with declarative. The decorated function will now have access to the final column copies present on the local mixin when invoked, and will also be invoked exactly once for each mapped class, the returned result being memoized. A new modifier :attr:`.declared_attr.cascading` is added as well. 
fixes #3150 - the original plan for #3150 has been scaled back; by copying mixin columns up front and memoizing, we don't actually need the "map properties later" thing. - full docs + migration notes --- doc/build/changelog/changelog_10.rst | 29 ++ doc/build/changelog/migration_10.rst | 136 +++++- doc/build/orm/extensions/declarative.rst | 1 + lib/sqlalchemy/ext/declarative/__init__.py | 117 ++++- lib/sqlalchemy/ext/declarative/api.py | 183 +++++++- lib/sqlalchemy/ext/declarative/base.py | 692 ++++++++++++++++------------- lib/sqlalchemy/orm/mapper.py | 13 +- lib/sqlalchemy/sql/schema.py | 6 +- lib/sqlalchemy/util/__init__.py | 3 +- lib/sqlalchemy/util/langhelpers.py | 15 +- test/ext/declarative/test_inheritance.py | 87 +++- test/ext/declarative/test_mixin.py | 199 ++++++++- 12 files changed, 1126 insertions(+), 355 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 88cae563f..536288c8f 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,35 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, declarative + :tickets: 2670 + + A relationship set up with :class:`.declared_attr` on + a :class:`.AbstractConcreteBase` base class will now be configured + on the abstract base mapping automatically, in addition to being + set up on descendant concrete classes as usual. + + .. seealso:: + + :ref:`feature_3150` + + .. change:: + :tags: feature, declarative + :tickets: 3150 + + The :class:`.declared_attr` construct has newly improved + behaviors and features in conjunction with declarative. The + decorated function will now have access to the final column + copies present on the local mixin when invoked, and will also + be invoked exactly once for each mapped class, the returned result + being memoized. 
A new modifier :attr:`.declared_attr.cascading` + is added as well. + + .. seealso:: + + :ref:`feature_3150` + .. change:: :tags: feature, ext :tickets: 3210 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index b489dc2df..0e9dd8d7b 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -8,7 +8,7 @@ What's New in SQLAlchemy 1.0? undergoing maintenance releases as of May, 2014, and SQLAlchemy version 1.0, as of yet unreleased. - Document last updated: September 7, 2014 + Document last updated: September 25, 2014 Introduction ============ @@ -307,6 +307,140 @@ Renders:: :ticket:`3177` +.. _feature_3150: + +Improvements to declarative mixins, ``@declared_attr`` and related features +---------------------------------------------------------------------------- + +The declarative system in conjunction with :class:`.declared_attr` has been +overhauled to support new capabilities. + +A function decorated with :class:`.declared_attr` is now called only **after** +any mixin-based column copies are generated. This means the function can +call upon mixin-established columns and will receive a reference to the correct +:class:`.Column` object:: + + class HasFooBar(object): + foobar = Column(Integer) + + @declared_attr + def foobar_prop(cls): + return column_property('foobar: ' + cls.foobar) + + class SomeClass(HasFooBar, Base): + __tablename__ = 'some_table' + id = Column(Integer, primary_key=True) + +Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``, +and ``SomeClass.foobar`` will be the final :class:`.Column` object that is +to be mapped to ``SomeClass``, as opposed to the non-copied object present +directly on ``HasFooBar``, even though the columns aren't mapped yet. + +The :class:`.declared_attr` function now **memoizes** the value +that's returned on a per-class basis, so that repeated calls to the same +attribute will return the same value. 
We can alter the example to illustrate +this:: + + class HasFooBar(object): + @declared_attr + def foobar(cls): + return Column(Integer) + + @declared_attr + def foobar_prop(cls): + return column_property('foobar: ' + cls.foobar) + + class SomeClass(HasFooBar, Base): + __tablename__ = 'some_table' + id = Column(Integer, primary_key=True) + +Previously, ``SomeClass`` would be mapped with one particular copy of +the ``foobar`` column, but the ``foobar_prop`` by calling upon ``foobar`` +a second time would produce a different column. The value of +``SomeClass.foobar`` is now memoized during declarative setup time, so that +even before the attribute is mapped by the mapper, the interim column +value will remain consistent no matter how many times the +:class:`.declared_attr` is called upon. + +The two behaviors above should help considerably with declarative definition +of many types of mapper properties that derive from other attributes, where +the :class:`.declared_attr` function is called upon from other +:class:`.declared_attr` functions locally present before the class is +actually mapped. + +For a pretty slim edge case where one wishes to build a declarative mixin +that establishes distinct columns per subclass, a new modifier +:attr:`.declared_attr.cascading` is added. With this modifier, the +decorated function will be invoked individually for each class in the +mapped inheritance hierarchy. While this is already the behavior for +special attributes such as ``__table_args__`` and ``__mapper_args__``, +for columns and other properties the behavior by default assumes that attribute +is affixed to the base class only, and just inherited from subclasses. 
+With :attr:`.declared_attr.cascading`, individual behaviors can be +applied:: + + class HasSomeAttribute(object): + @declared_attr.cascading + def some_id(cls): + if has_inherited_table(cls): + return Column(ForeignKey('myclass.id'), primary_key=True) + else: + return Column(Integer, primary_key=True) + + return Column('id', Integer, primary_key=True) + + class MyClass(HasSomeAttribute, Base): + "" + # ... + + class MySubClass(MyClass): + "" + # ... + +.. seealso:: + + :ref:`mixin_inheritance_columns` + +Finally, the :class:`.AbstractConcreteBase` class has been reworked +so that a relationship or other mapper property can be set up inline +on the abstract base:: + + from sqlalchemy import Column, Integer, ForeignKey + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import (declarative_base, declared_attr, + AbstractConcreteBase) + + Base = declarative_base() + + class Something(Base): + __tablename__ = u'something' + id = Column(Integer, primary_key=True) + + + class Abstract(AbstractConcreteBase, Base): + id = Column(Integer, primary_key=True) + + @declared_attr + def something_id(cls): + return Column(ForeignKey(Something.id)) + + @declared_attr + def something(cls): + return relationship(Something) + + + class Concrete(Abstract): + __tablename__ = u'cca' + __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True} + + +The above mapping will set up a table ``cca`` with both an ``id`` and +a ``something_id`` column, and ``Concrete`` will also have a relationship +``something``. The new feature is that ``Abstract`` will also have an +independently configured relationship ``something`` that builds against +the polymorphic union of the base. + +:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050` .. 
_bug_3188: diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst index 636bb451b..7d9e634b5 100644 --- a/doc/build/orm/extensions/declarative.rst +++ b/doc/build/orm/extensions/declarative.rst @@ -13,6 +13,7 @@ API Reference .. autofunction:: as_declarative .. autoclass:: declared_attr + :members: .. autofunction:: sqlalchemy.ext.declarative.api._declarative_constructor diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py index 3cbc85c0c..2b611252a 100644 --- a/lib/sqlalchemy/ext/declarative/__init__.py +++ b/lib/sqlalchemy/ext/declarative/__init__.py @@ -873,8 +873,7 @@ the method without the need to copy it. Columns generated by :class:`~.declared_attr` can also be referenced by ``__mapper_args__`` to a limited degree, currently -by ``polymorphic_on`` and ``version_id_col``, by specifying the -classdecorator itself into the dictionary - the declarative extension +by ``polymorphic_on`` and ``version_id_col``; the declarative extension will resolve them at class construction time:: class MyMixin: @@ -889,7 +888,6 @@ will resolve them at class construction time:: id = Column(Integer, primary_key=True) - Mixing in Relationships ~~~~~~~~~~~~~~~~~~~~~~~ @@ -922,6 +920,7 @@ reference a common target class via many-to-one:: __tablename__ = 'target' id = Column(Integer, primary_key=True) + Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1004,6 +1003,24 @@ requirement so that no reliance on copying is needed:: class Something(SomethingMixin, Base): __tablename__ = "something" +The :func:`.column_property` or other construct may refer +to other columns from the mixin. 
These are copied ahead of time before +the :class:`.declared_attr` is invoked:: + + class SomethingMixin(object): + x = Column(Integer) + + y = Column(Integer) + + @declared_attr + def x_plus_y(cls): + return column_property(cls.x + cls.y) + + +.. versionchanged:: 1.0.0 mixin columns are copied to the final mapped class + so that :class:`.declared_attr` methods can access the actual column + that will be mapped. + Mixing in Association Proxy and Other Attributes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1087,19 +1104,20 @@ and ``TypeB`` classes. Controlling table inheritance with mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The ``__tablename__`` attribute in conjunction with the hierarchy of -classes involved in a declarative mixin scenario controls what type of -table inheritance, if any, -is configured by the declarative extension. +The ``__tablename__`` attribute may be used to provide a function that +will determine the name of the table used for each class in an inheritance +hierarchy, as well as whether a class has its own distinct table. -If the ``__tablename__`` is computed by a mixin, you may need to -control which classes get the computed attribute in order to get the -type of table inheritance you require. +This is achieved using the :class:`.declared_attr` indicator in conjunction +with a method named ``__tablename__()``. Declarative will always +invoke :class:`.declared_attr` for the special names +``__tablename__``, ``__mapper_args__`` and ``__table_args__`` +function **for each mapped class in the hierarchy**. The function therefore +needs to expect to receive each class individually and to provide the +correct answer for each. 
-For example, if you had a mixin that computes ``__tablename__`` but
-where you wanted to use that mixin in a single table inheritance
-hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to
-indicate that the class should not have a table mapped::
+For example, to create a mixin that gives every class a simple table
+name based on class name::
 
     from sqlalchemy.ext.declarative import declared_attr
 
@@ -1118,15 +1136,10 @@ indicate that the class should not have a table mapped::
         __mapper_args__ = {'polymorphic_identity': 'engineer'}
         primary_language = Column(String(50))
 
-Alternatively, you can make the mixin intelligent enough to only
-return a ``__tablename__`` in the event that no table is already
-mapped in the inheritance hierarchy. To help with this, a
-:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper
-function is provided that returns ``True`` if a parent class already
-has a mapped table.
-
-As an example, here's a mixin that will only allow single table
-inheritance::
+Alternatively, we can modify our ``__tablename__`` function to return
+``None`` for subclasses, using :func:`.has_inherited_table`. This has
+the effect of those subclasses being mapped with single table inheritance
+against the parent::
 
     from sqlalchemy.ext.declarative import declared_attr
     from sqlalchemy.ext.declarative import has_inherited_table
@@ -1147,6 +1160,64 @@ inheritance::
         primary_language = Column(String(50))
         __mapper_args__ = {'polymorphic_identity': 'engineer'}
 
+.. _mixin_inheritance_columns:
+
+Mixing in Columns in Inheritance Scenarios
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In contrast to how ``__tablename__`` and other special names are handled when
+used with :class:`.declared_attr`, when we mix in columns and properties (e.g.
+relationships, column properties, etc.), the function is
+invoked for the **base class only** in the hierarchy. 
Below, only the +``Person`` class will receive a column +called ``id``; the mapping will fail on ``Engineer``, which is not given +a primary key:: + + class HasId(object): + @declared_attr + def id(cls): + return Column('id', Integer, primary_key=True) + + class Person(HasId, Base): + __tablename__ = 'person' + discriminator = Column('type', String(50)) + __mapper_args__ = {'polymorphic_on': discriminator} + + class Engineer(Person): + __tablename__ = 'engineer' + primary_language = Column(String(50)) + __mapper_args__ = {'polymorphic_identity': 'engineer'} + +It is usually the case in joined-table inheritance that we want distinctly +named columns on each subclass. However in this case, we may want to have +an ``id`` column on every table, and have them refer to each other via +foreign key. We can achieve this as a mixin by using the +:attr:`.declared_attr.cascading` modifier, which indicates that the +function should be invoked **for each class in the hierarchy**, just like +it does for ``__tablename__``:: + + class HasId(object): + @declared_attr.cascading + def id(cls): + if has_inherited_table(cls): + return Column('id', + Integer, + ForeignKey('person.id'), primary_key=True) + else: + return Column('id', Integer, primary_key=True) + + class Person(HasId, Base): + __tablename__ = 'person' + discriminator = Column('type', String(50)) + __mapper_args__ = {'polymorphic_on': discriminator} + + class Engineer(Person): + __tablename__ = 'engineer' + primary_language = Column(String(50)) + __mapper_args__ = {'polymorphic_identity': 'engineer'} + + +.. versionadded:: 1.0.0 added :attr:`.declared_attr.cascading`. 
Combining Table/Mapper Arguments from Multiple Mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index daf8bffb5..e84b21ad2 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -8,12 +8,13 @@ from ...schema import Table, MetaData -from ...orm import synonym as _orm_synonym, mapper,\ +from ...orm import synonym as _orm_synonym, \ comparable_property,\ - interfaces, properties + interfaces, properties, attributes from ...orm.util import polymorphic_union from ...orm.base import _mapper_or_none -from ...util import OrderedDict +from ...util import OrderedDict, hybridmethod, hybridproperty +from ... import util from ... import exc import weakref @@ -21,7 +22,6 @@ from .base import _as_declarative, \ _declarative_constructor,\ _DeferredMapperConfig, _add_attribute from .clsregistry import _class_resolver -from . import clsregistry def instrument_declarative(cls, registry, metadata): @@ -157,12 +157,98 @@ class declared_attr(interfaces._MappedAttribute, property): """ - def __init__(self, fget, *arg, **kw): - super(declared_attr, self).__init__(fget, *arg, **kw) + def __init__(self, fget, cascading=False): + super(declared_attr, self).__init__(fget) self.__doc__ = fget.__doc__ + self._cascading = cascading def __get__(desc, self, cls): - return desc.fget(cls) + # use the ClassManager for memoization of values. This is better than + # adding yet another attribute onto the class, or using weakrefs + # here which are slow and take up memory. It also allows us to + # warn for non-mapped use of declared_attr. 
+ + manager = attributes.manager_of_class(cls) + if manager is None: + util.warn( + "Unmanaged access of declarative attribute %s from " + "non-mapped class %s" % + (desc.fget.__name__, cls.__name__)) + return desc.fget(cls) + try: + reg = manager.info['declared_attr_reg'] + except KeyError: + raise exc.InvalidRequestError( + "@declared_attr called outside of the " + "declarative mapping process; is declarative_base() being " + "used correctly?") + + if desc in reg: + return reg[desc] + else: + reg[desc] = obj = desc.fget(cls) + return obj + + @hybridmethod + def _stateful(cls, **kw): + return _stateful_declared_attr(**kw) + + @hybridproperty + def cascading(cls): + """Mark a :class:`.declared_attr` as cascading. + + This is a special-use modifier which indicates that a column + or MapperProperty-based declared attribute should be configured + distinctly per mapped subclass, within a mapped-inheritance scenario. + + Below, both MyClass as well as MySubClass will have a distinct + ``id`` Column object established:: + + class HasSomeAttribute(object): + @declared_attr.cascading + def some_id(cls): + if has_inherited_table(cls): + return Column( + ForeignKey('myclass.id'), primary_key=True) + else: + return Column(Integer, primary_key=True) + + return Column('id', Integer, primary_key=True) + + class MyClass(HasSomeAttribute, Base): + "" + # ... + + class MySubClass(MyClass): + "" + # ... + + The behavior of the above configuration is that ``MySubClass`` + will refer to both its own ``id`` column as well as that of + ``MyClass`` underneath the attribute named ``some_id``. + + .. 
seealso::
+
+            :ref:`declarative_inheritance`
+
+            :ref:`mixin_inheritance_columns`
+
+
+        """
+        return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+    def __init__(self, **kw):
+        self.kw = kw
+
+    def _stateful(self, **kw):
+        new_kw = self.kw.copy()
+        new_kw.update(kw)
+        return _stateful_declared_attr(**new_kw)
+
+    def __call__(self, fn):
+        return declared_attr(fn, **self.kw)
 
 
 def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
@@ -349,9 +435,11 @@ class AbstractConcreteBase(ConcreteBase):
     ``__declare_last__()`` function, which is essentially a hook for the
     :meth:`.after_configured` event.
 
-    :class:`.AbstractConcreteBase` does not produce a mapped
-    table for the class itself. Compare to :class:`.ConcreteBase`,
-    which does.
+    :class:`.AbstractConcreteBase` does produce a mapped class
+    for the base class, however it is not persisted to any table; it
+    is instead mapped directly to the "polymorphic" selectable
+    and is only used for selecting. Compare to :class:`.ConcreteBase`,
+    which does create a persisted table for the base class.
 
     Example::
 
@@ -365,20 +453,72 @@ class AbstractConcreteBase(ConcreteBase):
             employee_id = Column(Integer, primary_key=True)
             name = Column(String(50))
             manager_data = Column(String(40))
+
             __mapper_args__ = {
-                'polymorphic_identity':'manager',
-                'concrete':True}
+                'polymorphic_identity':'manager',
+                'concrete':True}
+
+    The abstract base class is handled by declarative in a special way;
+    at class configuration time, it behaves like a declarative mixin
+    or an ``__abstract__`` base class. Once classes are configured
+    and mappings are produced, it then gets mapped itself, but
+    after all of its descendants. This is a very unique system of mapping
+    not found in any other SQLAlchemy system. 
+ + Using this approach, we can specify columns and properties + that will take place on mapped subclasses, in the way that + we normally do as in :ref:`declarative_mixins`:: + + class Company(Base): + __tablename__ = 'company' + id = Column(Integer, primary_key=True) + + class Employee(AbstractConcreteBase, Base): + employee_id = Column(Integer, primary_key=True) + + @declared_attr + def company_id(cls): + return Column(ForeignKey('company.id')) + + @declared_attr + def company(cls): + return relationship("Company") + + class Manager(Employee): + __tablename__ = 'manager' + + name = Column(String(50)) + manager_data = Column(String(40)) + + __mapper_args__ = { + 'polymorphic_identity':'manager', + 'concrete':True} + + When we make use of our mappings however, both ``Manager`` and + ``Employee`` will have an independently usable ``.company`` attribute:: + + session.query(Employee).filter(Employee.company.has(id=5)) + + .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase` + have been reworked to support relationships established directly + on the abstract base, without any special configurational steps. 
+ """ - __abstract__ = True + __no_table__ = True @classmethod def __declare_first__(cls): - if hasattr(cls, '__mapper__'): + cls._sa_decl_prepare_nocascade() + + @classmethod + def _sa_decl_prepare_nocascade(cls): + if getattr(cls, '__mapper__', None): return - clsregistry.add_class(cls.__name__, cls) + to_map = _DeferredMapperConfig.config_for_cls(cls) + # can't rely on 'self_and_descendants' here # since technically an immediate subclass # might not be mapped, but a subclass @@ -392,7 +532,18 @@ class AbstractConcreteBase(ConcreteBase): if mn is not None: mappers.append(mn) pjoin = cls._create_polymorphic_union(mappers) - cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type) + + to_map.local_table = pjoin + + m_args = to_map.mapper_args_fn or dict + + def mapper_args(): + args = m_args() + args['polymorphic_on'] = pjoin.c.type + return args + to_map.mapper_args_fn = mapper_args + + m = to_map.map() for scls in cls.__subclasses__(): sm = _mapper_or_none(scls) diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py index 94baeeb51..9cf07e208 100644 --- a/lib/sqlalchemy/ext/declarative/base.py +++ b/lib/sqlalchemy/ext/declarative/base.py @@ -19,6 +19,9 @@ from ... import event from . import clsregistry import collections import weakref +from sqlalchemy.orm import instrumentation + +declared_attr = declarative_props = None def _declared_mapping_info(cls): @@ -32,322 +35,402 @@ def _declared_mapping_info(cls): return None +def _get_immediate_cls_attr(cls, attrname): + """return an attribute of the class that is either present directly + on the class, e.g. not on a superclass, or is from a superclass but + this superclass is a mixin, that is, not a descendant of + the declarative base. + + This is used to detect attributes that indicate something about + a mapped class independently from any mapped classes that it may + inherit from. 
+ + """ + for base in cls.__mro__: + _is_declarative_inherits = hasattr(base, '_decl_class_registry') + if attrname in base.__dict__: + value = getattr(base, attrname) + if (base is cls or + (base in cls.__bases__ and not _is_declarative_inherits)): + return value + else: + return None + + def _as_declarative(cls, classname, dict_): - from .api import declared_attr + global declared_attr, declarative_props + if declared_attr is None: + from .api import declared_attr + declarative_props = (declared_attr, util.classproperty) - # dict_ will be a dictproxy, which we can't write to, and we need to! - dict_ = dict(dict_) + if _get_immediate_cls_attr(cls, '__abstract__'): + return - column_copies = {} - potential_columns = {} + _MapperConfig.setup_mapping(cls, classname, dict_) - mapper_args_fn = None - table_args = inherited_table_args = None - tablename = None - declarative_props = (declared_attr, util.classproperty) +class _MapperConfig(object): - for base in cls.__mro__: - _is_declarative_inherits = hasattr(base, '_decl_class_registry') + @classmethod + def setup_mapping(cls, cls_, classname, dict_): + defer_map = _get_immediate_cls_attr( + cls_, '_sa_decl_prepare_nocascade') or \ + hasattr(cls_, '_sa_decl_prepare') - if '__declare_last__' in base.__dict__: - @event.listens_for(mapper, "after_configured") - def go(): - cls.__declare_last__() - if '__declare_first__' in base.__dict__: - @event.listens_for(mapper, "before_configured") - def go(): - cls.__declare_first__() - if '__abstract__' in base.__dict__ and base.__abstract__: - if (base is cls or - (base in cls.__bases__ and not _is_declarative_inherits)): - return + if defer_map: + cfg_cls = _DeferredMapperConfig + else: + cfg_cls = _MapperConfig + cfg_cls(cls_, classname, dict_) - class_mapped = _declared_mapping_info(base) is not None + def __init__(self, cls_, classname, dict_): - for name, obj in vars(base).items(): - if name == '__mapper_args__': - if not mapper_args_fn and ( - not class_mapped or - 
isinstance(obj, declarative_props) - ): - # don't even invoke __mapper_args__ until - # after we've determined everything about the - # mapped table. - # make a copy of it so a class-level dictionary - # is not overwritten when we update column-based - # arguments. - mapper_args_fn = lambda: dict(cls.__mapper_args__) - elif name == '__tablename__': - if not tablename and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - tablename = cls.__tablename__ - elif name == '__table_args__': - if not table_args and ( - not class_mapped or - isinstance(obj, declarative_props) - ): - table_args = cls.__table_args__ - if not isinstance(table_args, (tuple, dict, type(None))): - raise exc.ArgumentError( - "__table_args__ value must be a tuple, " - "dict, or None") - if base is not cls: - inherited_table_args = True - elif class_mapped: - if isinstance(obj, declarative_props): - util.warn("Regular (i.e. not __special__) " - "attribute '%s.%s' uses @declared_attr, " - "but owning class %s is mapped - " - "not applying to subclass %s." - % (base.__name__, name, base, cls)) - continue - elif base is not cls: - # we're a mixin. - if isinstance(obj, Column): - if getattr(cls, name) is not obj: - # if column has been overridden - # (like by the InstrumentedAttribute of the - # superclass), skip + self.cls = cls_ + + # dict_ will be a dictproxy, which we can't write to, and we need to! 
+ self.dict_ = dict(dict_) + self.classname = classname + self.mapped_table = None + self.properties = util.OrderedDict() + self.declared_columns = set() + self.column_copies = {} + self._setup_declared_events() + + # register up front, so that @declared_attr can memoize + # function evaluations in .info + manager = instrumentation.register_class(self.cls) + manager.info['declared_attr_reg'] = {} + + self._scan_attributes() + + clsregistry.add_class(self.classname, self.cls) + + self._extract_mappable_attributes() + + self._extract_declared_columns() + + self._setup_table() + + self._setup_inheritance() + + self._early_mapping() + + def _early_mapping(self): + self.map() + + def _setup_declared_events(self): + if _get_immediate_cls_attr(self.cls, '__declare_last__'): + @event.listens_for(mapper, "after_configured") + def after_configured(): + self.cls.__declare_last__() + + if _get_immediate_cls_attr(self.cls, '__declare_first__'): + @event.listens_for(mapper, "before_configured") + def before_configured(): + self.cls.__declare_first__() + + def _scan_attributes(self): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + mapper_args_fn = None + table_args = inherited_table_args = None + tablename = None + + for base in cls.__mro__: + class_mapped = base is not cls and \ + _declared_mapping_info(base) is not None and \ + not _get_immediate_cls_attr(base, '_sa_decl_prepare_nocascade') + + if not class_mapped and base is not cls: + self._produce_column_copies(base) + + for name, obj in vars(base).items(): + if name == '__mapper_args__': + if not mapper_args_fn and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + # don't even invoke __mapper_args__ until + # after we've determined everything about the + # mapped table. + # make a copy of it so a class-level dictionary + # is not overwritten when we update column-based + # arguments. 
+ mapper_args_fn = lambda: dict(cls.__mapper_args__) + elif name == '__tablename__': + if not tablename and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + tablename = cls.__tablename__ + elif name == '__table_args__': + if not table_args and ( + not class_mapped or + isinstance(obj, declarative_props) + ): + table_args = cls.__table_args__ + if not isinstance( + table_args, (tuple, dict, type(None))): + raise exc.ArgumentError( + "__table_args__ value must be a tuple, " + "dict, or None") + if base is not cls: + inherited_table_args = True + elif class_mapped: + if isinstance(obj, declarative_props): + util.warn("Regular (i.e. not __special__) " + "attribute '%s.%s' uses @declared_attr, " + "but owning class %s is mapped - " + "not applying to subclass %s." + % (base.__name__, name, base, cls)) + continue + elif base is not cls: + # we're a mixin, abstract base, or something that is + # acting like that for now. + if isinstance(obj, Column): + # already copied columns to the mapped class. continue - if obj.foreign_keys: + elif isinstance(obj, MapperProperty): raise exc.InvalidRequestError( - "Columns with foreign keys to other columns " - "must be declared as @declared_attr callables " - "on declarative mixin classes. ") - if name not in dict_ and not ( - '__table__' in dict_ and - (obj.name or name) in dict_['__table__'].c - ) and name not in potential_columns: - potential_columns[name] = \ - column_copies[obj] = \ - obj.copy() - column_copies[obj]._creation_order = \ - obj._creation_order - elif isinstance(obj, MapperProperty): + "Mapper properties (i.e. deferred," + "column_property(), relationship(), etc.) 
must " + "be declared as @declared_attr callables " + "on declarative mixin classes.") + elif isinstance(obj, declarative_props): + oldclassprop = isinstance(obj, util.classproperty) + if not oldclassprop and obj._cascading: + dict_[name] = column_copies[obj] = \ + ret = obj.__get__(obj, cls) + else: + if oldclassprop: + util.warn_deprecated( + "Use of sqlalchemy.util.classproperty on " + "declarative classes is deprecated.") + dict_[name] = column_copies[obj] = \ + ret = getattr(cls, name) + if isinstance(ret, (Column, MapperProperty)) and \ + ret.doc is None: + ret.doc = obj.__doc__ + + if inherited_table_args and not tablename: + table_args = None + + self.table_args = table_args + self.tablename = tablename + self.mapper_args_fn = mapper_args_fn + + def _produce_column_copies(self, base): + cls = self.cls + dict_ = self.dict_ + column_copies = self.column_copies + # copy mixin columns to the mapped class + for name, obj in vars(base).items(): + if isinstance(obj, Column): + if getattr(cls, name) is not obj: + # if column has been overridden + # (like by the InstrumentedAttribute of the + # superclass), skip + continue + elif obj.foreign_keys: raise exc.InvalidRequestError( - "Mapper properties (i.e. deferred," - "column_property(), relationship(), etc.) must " - "be declared as @declared_attr callables " - "on declarative mixin classes.") - elif isinstance(obj, declarative_props): - dict_[name] = ret = \ - column_copies[obj] = getattr(cls, name) - if isinstance(ret, (Column, MapperProperty)) and \ - ret.doc is None: - ret.doc = obj.__doc__ - - # apply inherited columns as we should - for k, v in potential_columns.items(): - dict_[k] = v - - if inherited_table_args and not tablename: - table_args = None - - clsregistry.add_class(classname, cls) - our_stuff = util.OrderedDict() - - for k in list(dict_): - - # TODO: improve this ? all dunders ? 
- if k in ('__table__', '__tablename__', '__mapper_args__'): - continue - - value = dict_[k] - if isinstance(value, declarative_props): - value = getattr(cls, k) - - elif isinstance(value, QueryableAttribute) and \ - value.class_ is not cls and \ - value.key != k: - # detect a QueryableAttribute that's already mapped being - # assigned elsewhere in userland, turn into a synonym() - value = synonym(value.key) - setattr(cls, k, value) - - if (isinstance(value, tuple) and len(value) == 1 and - isinstance(value[0], (Column, MapperProperty))): - util.warn("Ignoring declarative-like tuple value of attribute " - "%s: possibly a copy-and-paste error with a comma " - "left at the end of the line?" % k) - continue - if not isinstance(value, (Column, MapperProperty)): - if not k.startswith('__'): - dict_.pop(k) - setattr(cls, k, value) - continue - if k == 'metadata': - raise exc.InvalidRequestError( - "Attribute name 'metadata' is reserved " - "for the MetaData instance when using a " - "declarative base class." - ) - prop = clsregistry._deferred_relationship(cls, value) - our_stuff[k] = prop - - # set up attributes in the order they were created - our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) - - # extract columns from the class dict - declared_columns = set() - name_to_prop_key = collections.defaultdict(set) - for key, c in list(our_stuff.items()): - if isinstance(c, (ColumnProperty, CompositeProperty)): - for col in c.columns: - if isinstance(col, Column) and \ - col.table is None: - _undefer_column_name(key, col) - if not isinstance(c, CompositeProperty): - name_to_prop_key[col.name].add(key) - declared_columns.add(col) - elif isinstance(c, Column): - _undefer_column_name(key, c) - name_to_prop_key[c.name].add(key) - declared_columns.add(c) - # if the column is the same name as the key, - # remove it from the explicit properties dict. 
- # the normal rules for assigning column-based properties - # will take over, including precedence of columns - # in multi-column ColumnProperties. - if key == c.key: - del our_stuff[key] - - for name, keys in name_to_prop_key.items(): - if len(keys) > 1: - util.warn( - "On class %r, Column object %r named directly multiple times, " - "only one will be used: %s" % - (classname, name, (", ".join(sorted(keys)))) - ) + "Columns with foreign keys to other columns " + "must be declared as @declared_attr callables " + "on declarative mixin classes. ") + elif name not in dict_ and not ( + '__table__' in dict_ and + (obj.name or name) in dict_['__table__'].c + ): + column_copies[obj] = copy_ = obj.copy() + copy_._creation_order = obj._creation_order + setattr(cls, name, copy_) + dict_[name] = copy_ - declared_columns = sorted( - declared_columns, key=lambda c: c._creation_order) - table = None + def _extract_mappable_attributes(self): + cls = self.cls + dict_ = self.dict_ - if hasattr(cls, '__table_cls__'): - table_cls = util.unbound_method_to_callable(cls.__table_cls__) - else: - table_cls = Table - - if '__table__' not in dict_: - if tablename is not None: - - args, table_kw = (), {} - if table_args: - if isinstance(table_args, dict): - table_kw = table_args - elif isinstance(table_args, tuple): - if isinstance(table_args[-1], dict): - args, table_kw = table_args[0:-1], table_args[-1] - else: - args = table_args - - autoload = dict_.get('__autoload__') - if autoload: - table_kw['autoload'] = True - - cls.__table__ = table = table_cls( - tablename, cls.metadata, - *(tuple(declared_columns) + tuple(args)), - **table_kw) - else: - table = cls.__table__ - if declared_columns: - for c in declared_columns: - if not table.c.contains_column(c): - raise exc.ArgumentError( - "Can't add additional column %r when " - "specifying __table__" % c.key - ) + our_stuff = self.properties - if hasattr(cls, '__mapper_cls__'): - mapper_cls = 
util.unbound_method_to_callable(cls.__mapper_cls__) - else: - mapper_cls = mapper + for k in list(dict_): - for c in cls.__bases__: - if _declared_mapping_info(c) is not None: - inherits = c - break - else: - inherits = None + # TODO: improve this ? all dunders ? + if k in ('__table__', '__tablename__', '__mapper_args__'): + continue - if table is None and inherits is None: - raise exc.InvalidRequestError( - "Class %r does not have a __table__ or __tablename__ " - "specified and does not inherit from an existing " - "table-mapped class." % cls - ) - elif inherits: - inherited_mapper = _declared_mapping_info(inherits) - inherited_table = inherited_mapper.local_table - inherited_mapped_table = inherited_mapper.mapped_table - - if table is None: - # single table inheritance. - # ensure no table args - if table_args: - raise exc.ArgumentError( - "Can't place __table_args__ on an inherited class " - "with no table." + value = dict_[k] + if isinstance(value, declarative_props): + value = getattr(cls, k) + + elif isinstance(value, QueryableAttribute) and \ + value.class_ is not cls and \ + value.key != k: + # detect a QueryableAttribute that's already mapped being + # assigned elsewhere in userland, turn into a synonym() + value = synonym(value.key) + setattr(cls, k, value) + + if (isinstance(value, tuple) and len(value) == 1 and + isinstance(value[0], (Column, MapperProperty))): + util.warn("Ignoring declarative-like tuple value of attribute " + "%s: possibly a copy-and-paste error with a comma " + "left at the end of the line?" % k) + continue + if not isinstance(value, (Column, MapperProperty)): + if not k.startswith('__'): + dict_.pop(k) + setattr(cls, k, value) + continue + if k == 'metadata': + raise exc.InvalidRequestError( + "Attribute name 'metadata' is reserved " + "for the MetaData instance when using a " + "declarative base class." 
+ ) + prop = clsregistry._deferred_relationship(cls, value) + our_stuff[k] = prop + + def _extract_declared_columns(self): + our_stuff = self.properties + + # set up attributes in the order they were created + our_stuff.sort(key=lambda key: our_stuff[key]._creation_order) + + # extract columns from the class dict + declared_columns = self.declared_columns + name_to_prop_key = collections.defaultdict(set) + for key, c in list(our_stuff.items()): + if isinstance(c, (ColumnProperty, CompositeProperty)): + for col in c.columns: + if isinstance(col, Column) and \ + col.table is None: + _undefer_column_name(key, col) + if not isinstance(c, CompositeProperty): + name_to_prop_key[col.name].add(key) + declared_columns.add(col) + elif isinstance(c, Column): + _undefer_column_name(key, c) + name_to_prop_key[c.name].add(key) + declared_columns.add(c) + # if the column is the same name as the key, + # remove it from the explicit properties dict. + # the normal rules for assigning column-based properties + # will take over, including precedence of columns + # in multi-column ColumnProperties. + if key == c.key: + del our_stuff[key] + + for name, keys in name_to_prop_key.items(): + if len(keys) > 1: + util.warn( + "On class %r, Column object %r named " + "directly multiple times, " + "only one will be used: %s" % + (self.classname, name, (", ".join(sorted(keys)))) ) - # add any columns declared here to the inherited table. - for c in declared_columns: - if c.primary_key: - raise exc.ArgumentError( - "Can't place primary key columns on an inherited " - "class with no table." 
- ) - if c.name in inherited_table.c: - if inherited_table.c[c.name] is c: - continue - raise exc.ArgumentError( - "Column '%s' on class %s conflicts with " - "existing column '%s'" % - (c, cls, inherited_table.c[c.name]) - ) - inherited_table.append_column(c) - if inherited_mapped_table is not None and \ - inherited_mapped_table is not inherited_table: - inherited_mapped_table._refresh_for_new_column(c) - - defer_map = hasattr(cls, '_sa_decl_prepare') - if defer_map: - cfg_cls = _DeferredMapperConfig - else: - cfg_cls = _MapperConfig - mt = cfg_cls(mapper_cls, - cls, table, - inherits, - declared_columns, - column_copies, - our_stuff, - mapper_args_fn) - if not defer_map: - mt.map() + def _setup_table(self): + cls = self.cls + tablename = self.tablename + table_args = self.table_args + dict_ = self.dict_ + declared_columns = self.declared_columns -class _MapperConfig(object): + declared_columns = self.declared_columns = sorted( + declared_columns, key=lambda c: c._creation_order) + table = None - mapped_table = None - - def __init__(self, mapper_cls, - cls, - table, - inherits, - declared_columns, - column_copies, - properties, mapper_args_fn): - self.mapper_cls = mapper_cls - self.cls = cls + if hasattr(cls, '__table_cls__'): + table_cls = util.unbound_method_to_callable(cls.__table_cls__) + else: + table_cls = Table + + if '__table__' not in dict_: + if tablename is not None: + + args, table_kw = (), {} + if table_args: + if isinstance(table_args, dict): + table_kw = table_args + elif isinstance(table_args, tuple): + if isinstance(table_args[-1], dict): + args, table_kw = table_args[0:-1], table_args[-1] + else: + args = table_args + + autoload = dict_.get('__autoload__') + if autoload: + table_kw['autoload'] = True + + cls.__table__ = table = table_cls( + tablename, cls.metadata, + *(tuple(declared_columns) + tuple(args)), + **table_kw) + else: + table = cls.__table__ + if declared_columns: + for c in declared_columns: + if not table.c.contains_column(c): + 
raise exc.ArgumentError( + "Can't add additional column %r when " + "specifying __table__" % c.key + ) self.local_table = table - self.inherits = inherits - self.properties = properties - self.mapper_args_fn = mapper_args_fn - self.declared_columns = declared_columns - self.column_copies = column_copies + + def _setup_inheritance(self): + table = self.local_table + cls = self.cls + table_args = self.table_args + declared_columns = self.declared_columns + for c in cls.__bases__: + if _declared_mapping_info(c) is not None and \ + not _get_immediate_cls_attr( + c, '_sa_decl_prepare_nocascade'): + self.inherits = c + break + else: + self.inherits = None + + if table is None and self.inherits is None and \ + not _get_immediate_cls_attr(cls, '__no_table__'): + + raise exc.InvalidRequestError( + "Class %r does not have a __table__ or __tablename__ " + "specified and does not inherit from an existing " + "table-mapped class." % cls + ) + elif self.inherits: + inherited_mapper = _declared_mapping_info(self.inherits) + inherited_table = inherited_mapper.local_table + inherited_mapped_table = inherited_mapper.mapped_table + + if table is None: + # single table inheritance. + # ensure no table args + if table_args: + raise exc.ArgumentError( + "Can't place __table_args__ on an inherited class " + "with no table." + ) + # add any columns declared here to the inherited table. + for c in declared_columns: + if c.primary_key: + raise exc.ArgumentError( + "Can't place primary key columns on an inherited " + "class with no table." 
+ ) + if c.name in inherited_table.c: + if inherited_table.c[c.name] is c: + continue + raise exc.ArgumentError( + "Column '%s' on class %s conflicts with " + "existing column '%s'" % + (c, cls, inherited_table.c[c.name]) + ) + inherited_table.append_column(c) + if inherited_mapped_table is not None and \ + inherited_mapped_table is not inherited_table: + inherited_mapped_table._refresh_for_new_column(c) def _prepare_mapper_arguments(self): properties = self.properties @@ -401,20 +484,31 @@ class _MapperConfig(object): properties[k] = [col] + p.columns result_mapper_args = mapper_args.copy() result_mapper_args['properties'] = properties - return result_mapper_args + self.mapper_args = result_mapper_args def map(self): - mapper_args = self._prepare_mapper_arguments() - self.cls.__mapper__ = self.mapper_cls( + self._prepare_mapper_arguments() + if hasattr(self.cls, '__mapper_cls__'): + mapper_cls = util.unbound_method_to_callable( + self.cls.__mapper_cls__) + else: + mapper_cls = mapper + + self.cls.__mapper__ = mp_ = mapper_cls( self.cls, self.local_table, - **mapper_args + **self.mapper_args ) + del mp_.class_manager.info['declared_attr_reg'] + return mp_ class _DeferredMapperConfig(_MapperConfig): _configs = util.OrderedDict() + def _early_mapping(self): + pass + @property def cls(self): return self._cls() @@ -466,7 +560,7 @@ class _DeferredMapperConfig(_MapperConfig): def map(self): self._configs.pop(self._cls, None) - super(_DeferredMapperConfig, self).map() + return super(_DeferredMapperConfig, self).map() def _add_attribute(cls, key, value): diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index a59a38a5b..eaade21ec 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -1080,6 +1080,9 @@ class Mapper(InspectionAttr): auto-session attachment logic. """ + + # when using declarative as of 1.0, the register_class has + # already happened from within declarative. 
manager = attributes.manager_of_class(self.class_) if self.non_primary: @@ -1102,18 +1105,14 @@ class Mapper(InspectionAttr): "create a non primary Mapper. clear_mappers() will " "remove *all* current mappers from all classes." % self.class_) - # else: - # a ClassManager may already exist as - # ClassManager.instrument_attribute() creates - # new managers for each subclass if they don't yet exist. + + if manager is None: + manager = instrumentation.register_class(self.class_) _mapper_registry[self] = True self.dispatch.instrument_class(self, self.class_) - if manager is None: - manager = instrumentation.register_class(self.class_) - self.class_manager = manager manager.mapper = self diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d9fd37f92..26d7c428e 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1222,8 +1222,10 @@ class Column(SchemaItem, ColumnClause): existing = getattr(self, 'table', None) if existing is not None and existing is not table: raise exc.ArgumentError( - "Column object already assigned to Table '%s'" % - existing.description) + "Column object '%s' already assigned to Table '%s'" % ( + self.key, + existing.description + )) if self.key in table._columns: col = table._columns.get(self.key) diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index c963b18c3..dfed5b90a 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -33,7 +33,8 @@ from .langhelpers import iterate_attributes, class_hierarchy, \ duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\ classproperty, set_creation_order, warn_exception, warn, NoneType,\ constructor_copy, methods_equivalent, chop_traceback, asint,\ - generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\ + generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \ + safe_reraise,\ get_callable_argspec, only_once, attrsetter, ellipses_string, \ warn_limited diff --git 
a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 76f85f605..95369783d 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -1090,10 +1090,23 @@ class classproperty(property): return desc.fget(cls) +class hybridproperty(object): + def __init__(self, func): + self.func = func + + def __get__(self, instance, owner): + if instance is None: + clsval = self.func(owner) + clsval.__doc__ = self.func.__doc__ + return clsval + else: + return self.func(instance) + + class hybridmethod(object): """Decorate a function as cls- or instance- level.""" - def __init__(self, func, expr=None): + def __init__(self, func): self.func = func def __get__(self, instance, owner): diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py index e450a1c43..5a99c9c5a 100644 --- a/test/ext/declarative/test_inheritance.py +++ b/test/ext/declarative/test_inheritance.py @@ -11,7 +11,7 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \ polymorphic_union, deferred, Session from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \ ConcreteBase, has_inherited_table -from sqlalchemy.testing import fixtures +from sqlalchemy.testing import fixtures, mock Base = None @@ -1303,3 +1303,88 @@ class ConcreteExtensionConfigTest( "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin " "ON pjoin.a_id = a.id" ) + + def test_prop_on_base(self): + """test [ticket:2670] """ + + counter = mock.Mock() + + class Something(Base): + __tablename__ = 'something' + id = Column(Integer, primary_key=True) + + class AbstractConcreteAbstraction(AbstractConcreteBase, Base): + id = Column(Integer, primary_key=True) + x = Column(Integer) + y = Column(Integer) + + @declared_attr + def something_id(cls): + return Column(ForeignKey(Something.id)) + + @declared_attr + def something(cls): + counter(cls, "something") + return relationship("Something") + + @declared_attr + def 
something_else(cls): + counter(cls, "something_else") + return relationship("Something") + + class ConcreteConcreteAbstraction(AbstractConcreteAbstraction): + __tablename__ = 'cca' + __mapper_args__ = { + 'polymorphic_identity': 'ccb', + 'concrete': True} + + # concrete is mapped, the abstract base is not (yet) + assert ConcreteConcreteAbstraction.__mapper__ + assert not hasattr(AbstractConcreteAbstraction, '__mapper__') + + session = Session() + self.assert_compile( + session.query(ConcreteConcreteAbstraction).filter( + ConcreteConcreteAbstraction.something.has(id=1)), + "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, " + "cca.something_id AS cca_something_id FROM cca WHERE EXISTS " + "(SELECT 1 FROM something WHERE something.id = cca.something_id " + "AND something.id = :id_1)" + ) + + # now it is + assert AbstractConcreteAbstraction.__mapper__ + + self.assert_compile( + session.query(ConcreteConcreteAbstraction).filter( + ConcreteConcreteAbstraction.something_else.has(id=1)), + "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, " + "cca.something_id AS cca_something_id FROM cca WHERE EXISTS " + "(SELECT 1 FROM something WHERE something.id = cca.something_id " + "AND something.id = :id_1)" + ) + + self.assert_compile( + session.query(AbstractConcreteAbstraction).filter( + AbstractConcreteAbstraction.something.has(id=1)), + "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, " + "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, " + "pjoin.type AS pjoin_type FROM " + "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, " + "cca.something_id AS something_id, 'ccb' AS type FROM cca) " + "AS pjoin WHERE EXISTS (SELECT 1 FROM something " + "WHERE something.id = pjoin.something_id AND something.id = :id_1)" + ) + + self.assert_compile( + session.query(AbstractConcreteAbstraction).filter( + AbstractConcreteAbstraction.something_else.has(id=1)), + "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, " + "pjoin.y AS pjoin_y, pjoin.something_id AS 
pjoin_something_id, " + "pjoin.type AS pjoin_type FROM " + "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, " + "cca.something_id AS something_id, 'ccb' AS type FROM cca) " + "AS pjoin WHERE EXISTS (SELECT 1 FROM something " + "WHERE something.id = pjoin.something_id AND something.id = :id_1)" + ) + diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py index 0d7cb7169..db86927a1 100644 --- a/test/ext/declarative/test_mixin.py +++ b/test/ext/declarative/test_mixin.py @@ -3,15 +3,15 @@ from sqlalchemy.testing import eq_, assert_raises, \ from sqlalchemy.ext import declarative as decl import sqlalchemy as sa from sqlalchemy import testing -from sqlalchemy import Integer, String, ForeignKey +from sqlalchemy import Integer, String, ForeignKey, select, func from sqlalchemy.testing.schema import Table, Column from sqlalchemy.orm import relationship, create_session, class_mapper, \ configure_mappers, clear_mappers, \ - deferred, column_property, \ - Session + deferred, column_property, Session, base as orm_base from sqlalchemy.util import classproperty from sqlalchemy.ext.declarative import declared_attr -from sqlalchemy.testing import fixtures +from sqlalchemy.testing import fixtures, mock +from sqlalchemy.testing.util import gc_collect Base = None @@ -1302,6 +1302,197 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase): self._test_relationship(True) +class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL): + __dialect__ = 'default' + + def test_singleton_behavior_within_decl(self): + counter = mock.Mock() + + class Mixin(object): + @declared_attr + def my_prop(cls): + counter(cls) + return Column('x', Integer) + + class A(Base, Mixin): + __tablename__ = 'a' + id = Column(Integer, primary_key=True) + + @declared_attr + def my_other_prop(cls): + return column_property(cls.my_prop + 5) + + eq_(counter.mock_calls, [mock.call(A)]) + + class B(Base, Mixin): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + + 
@declared_attr + def my_other_prop(cls): + return column_property(cls.my_prop + 5) + + eq_( + counter.mock_calls, + [mock.call(A), mock.call(B)]) + + # this is why we need singleton-per-class behavior. We get + # an un-bound "x" column otherwise here, because my_prop() generates + # multiple columns. + a_col = A.my_other_prop.__clause_element__().element.left + b_col = B.my_other_prop.__clause_element__().element.left + is_(a_col.table, A.__table__) + is_(b_col.table, B.__table__) + is_(a_col, A.__table__.c.x) + is_(b_col, B.__table__.c.x) + + s = Session() + self.assert_compile( + s.query(A), + "SELECT a.x AS a_x, a.x + :x_1 AS anon_1, a.id AS a_id FROM a" + ) + self.assert_compile( + s.query(B), + "SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b" + ) + + + def test_singleton_gc(self): + counter = mock.Mock() + + class Mixin(object): + @declared_attr + def my_prop(cls): + counter(cls.__name__) + return Column('x', Integer) + + class A(Base, Mixin): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + + @declared_attr + def my_other_prop(cls): + return column_property(cls.my_prop + 5) + + eq_(counter.mock_calls, [mock.call("A")]) + del A + gc_collect() + assert "A" not in Base._decl_class_registry + + def test_can_we_access_the_mixin_straight(self): + class Mixin(object): + @declared_attr + def my_prop(cls): + return Column('x', Integer) + + assert_raises_message( + sa.exc.SAWarning, + "Unmanaged access of declarative attribute my_prop " + "from non-mapped class Mixin", + getattr, Mixin, "my_prop" + ) + + def test_property_noncascade(self): + counter = mock.Mock() + + class Mixin(object): + @declared_attr + def my_prop(cls): + counter(cls) + return column_property(cls.x + 2) + + class A(Base, Mixin): + __tablename__ = 'a' + + id = Column(Integer, primary_key=True) + x = Column(Integer) + + class B(A): + pass + + eq_(counter.mock_calls, [mock.call(A)]) + + def test_property_cascade(self): + counter = mock.Mock() + + class Mixin(object): + 
@declared_attr.cascading + def my_prop(cls): + counter(cls) + return column_property(cls.x + 2) + + class A(Base, Mixin): + __tablename__ = 'a' + + id = Column(Integer, primary_key=True) + x = Column(Integer) + + class B(A): + pass + + eq_(counter.mock_calls, [mock.call(A), mock.call(B)]) + + def test_column_pre_map(self): + counter = mock.Mock() + + class Mixin(object): + @declared_attr + def my_col(cls): + counter(cls) + assert not orm_base._mapper_or_none(cls) + return Column('x', Integer) + + class A(Base, Mixin): + __tablename__ = 'a' + + id = Column(Integer, primary_key=True) + + eq_(counter.mock_calls, [mock.call(A)]) + + def test_mixin_attr_refers_to_column_copies(self): + # this @declared_attr can refer to User.id + # freely because we now do the "copy column" operation + # before the declared_attr is invoked. + + counter = mock.Mock() + + class HasAddressCount(object): + id = Column(Integer, primary_key=True) + + @declared_attr + def address_count(cls): + counter(cls.id) + return column_property( + select([func.count(Address.id)]). + where(Address.user_id == cls.id). 
+ as_scalar() + ) + + class Address(Base): + __tablename__ = 'address' + id = Column(Integer, primary_key=True) + user_id = Column(ForeignKey('user.id')) + + class User(Base, HasAddressCount): + __tablename__ = 'user' + + eq_( + counter.mock_calls, + [mock.call(User.id)] + ) + + sess = Session() + self.assert_compile( + sess.query(User).having(User.address_count > 5), + 'SELECT (SELECT count(address.id) AS ' + 'count_1 FROM address WHERE address.user_id = "user".id) ' + 'AS anon_1, "user".id AS user_id FROM "user" ' + 'HAVING (SELECT count(address.id) AS ' + 'count_1 FROM address WHERE address.user_id = "user".id) ' + '> :param_1' + ) + + class AbstractTest(DeclarativeTestBase): def test_abstract_boolean(self): -- cgit v1.2.1 From 19ec6c33681ee4d7793b7c6a9301425cd34abb0b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 25 Sep 2014 21:16:13 -0400 Subject: - "column already assigned" message has been updated in 7f82c55f --- test/sql/test_metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 304e2cc5f..6b8e1bb40 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -2124,7 +2124,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase): assert_raises_message( exc.ArgumentError, - "Column object already assigned to Table 't'", + "Column object 'x' already assigned to Table 't'", Table, 'q', MetaData(), c) def test_incomplete_key(self): -- cgit v1.2.1 From cb0c1eda24e1546fcdc9b4e46adc1c5fb14b60d0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Sep 2014 10:59:39 -0400 Subject: comments --- lib/sqlalchemy/ext/declarative/base.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py index 9cf07e208..291608b6c 100644 --- a/lib/sqlalchemy/ext/declarative/base.py +++ b/lib/sqlalchemy/ext/declarative/base.py @@ -254,7 +254,6 @@ class 
_MapperConfig(object): for k in list(dict_): - # TODO: improve this ? all dunders ? if k in ('__table__', '__tablename__', '__mapper_args__'): continue @@ -276,12 +275,18 @@ class _MapperConfig(object): "%s: possibly a copy-and-paste error with a comma " "left at the end of the line?" % k) continue - if not isinstance(value, (Column, MapperProperty)): + elif not isinstance(value, (Column, MapperProperty)): + # using @declared_attr for some object that + # isn't Column/MapperProperty; remove from the dict_ + # and place the evaluated value onto the class. if not k.startswith('__'): dict_.pop(k) setattr(cls, k, value) continue - if k == 'metadata': + # we expect to see the name 'metadata' in some valid cases; + # however at this point we see it's assigned to something trying + # to be mapped, so raise for that. + elif k == 'metadata': raise exc.InvalidRequestError( "Attribute name 'metadata' is reserved " "for the MetaData instance when using a " -- cgit v1.2.1 From b1a956d4210c2bb06051a4a8b0d2e75d7c471ecd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Sep 2014 12:50:18 -0400 Subject: - add explicit warning re: polymorphic_on, cascading is not supported at this time. ref #3214 --- doc/build/orm/inheritance.rst | 6 ++++++ lib/sqlalchemy/orm/mapper.py | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst index 642f3420c..9f01a3e24 100644 --- a/doc/build/orm/inheritance.rst +++ b/doc/build/orm/inheritance.rst @@ -45,6 +45,12 @@ this column is to act as the **discriminator**, and stores a value which indicates the type of object represented within the row. The column may be of any datatype, though string and integer are the most common. +.. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic columns + are not yet supported. 
+ The discriminator column is only needed if polymorphic loading is desired, as is usually the case. It is not strictly necessary that it be present directly on the base mapped table, and can instead be defined on a diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index eaade21ec..2ab239f86 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -426,6 +426,12 @@ class Mapper(InspectionAttr): thus persisting the value to the ``discriminator`` column in the database. + .. warning:: + + Currently, **only one discriminator column may be set**, typically + on the base-most class in the hierarchy. "Cascading" polymorphic + columns are not yet supported. + .. seealso:: :ref:`inheritance_toplevel` -- cgit v1.2.1 From fbddf193a684ffe660c94c28e4c26e187111b21c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Sep 2014 14:55:36 -0400 Subject: - Fixed bug where a "branched" connection, that is the kind you get when you call :meth:`.Connection.connect`, would not share invalidation status with the parent. The architecture of branching has been tweaked a bit so that the branched connection defers to the parent for all invalidation status and operations. fixes #3215 --- doc/build/changelog/changelog_10.rst | 10 +++++++++ lib/sqlalchemy/engine/base.py | 43 +++++++++++++++++++++++++++--------- test/engine/test_reconnect.py | 32 +++++++++++++++++++++++++++ 3 files changed, 74 insertions(+), 11 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 536288c8f..a4f3dd6e5 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,16 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. 
change:: + :tags: bug, sql, engine + :tickets: 3215 + + Fixed bug where a "branched" connection, that is the kind you get + when you call :meth:`.Connection.connect`, would not share invalidation + status with the parent. The architecture of branching has been tweaked + a bit so that the branched connection defers to the parent for + all invalidation status and operations. + .. change:: :tags: bug, declarative :tickets: 2670 diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index d2cc8890f..ec7aed1c3 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -45,7 +45,7 @@ class Connection(Connectable): """ def __init__(self, engine, connection=None, close_with_result=False, - _branch=False, _execution_options=None, + _branch_from=None, _execution_options=None, _dispatch=None, _has_events=None): """Construct a new Connection. @@ -61,7 +61,8 @@ class Connection(Connectable): self.__transaction = None self.should_close_with_result = close_with_result self.__savepoint_seq = 0 - self.__branch = _branch + self.__branch_from = _branch_from + self.__branch = _branch_from is not None self.__invalid = False self.__can_reconnect = True if _dispatch: @@ -82,7 +83,7 @@ class Connection(Connectable): self._execution_options = engine._execution_options if self._has_events or self.engine._has_events: - self.dispatch.engine_connect(self, _branch) + self.dispatch.engine_connect(self, self.__branch) def _branch(self): """Return a new Connection which references this Connection's @@ -92,13 +93,26 @@ class Connection(Connectable): This is used to execute "sub" statements within a single execution, usually an INSERT statement. """ + if self.__branch_from: + return self.__branch_from._branch() + else: + return self.engine._connection_cls( + self.engine, + self.__connection, + _branch_from=self, + _has_events=self._has_events, + _dispatch=self.dispatch) + + @property + def _root(self): + """return the 'root' connection. 
- return self.engine._connection_cls( - self.engine, - self.__connection, - _branch=True, - _has_events=self._has_events, - _dispatch=self.dispatch) + Returns 'self' if this connection is not a branch, else + returns the root connection from which we ultimately branched.""" + if self.__branch_from: + return self.__branch_from + else: + return self def _clone(self): """Create a shallow copy of this Connection. @@ -218,13 +232,13 @@ class Connection(Connectable): """Return True if this connection is closed.""" return '_Connection__connection' not in self.__dict__ \ - and not self.__can_reconnect + and not self._root.__can_reconnect @property def invalidated(self): """Return True if this connection was invalidated.""" - return self.__invalid + return self._root.__invalid @property def connection(self): @@ -236,6 +250,9 @@ class Connection(Connectable): return self._revalidate_connection() def _revalidate_connection(self): + if self.__branch_from: + return self._root._revalidate_connection() + if self.__can_reconnect and self.__invalid: if self.__transaction is not None: raise exc.InvalidRequestError( @@ -343,6 +360,10 @@ class Connection(Connectable): :ref:`pool_connection_invalidation` """ + if self.__branch_from: + self._root.invalidate() + return + if self.invalidated: return diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index c82cca5a1..26a607301 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -504,6 +504,38 @@ class RealReconnectTest(fixtures.TestBase): # pool isn't replaced assert self.engine.pool is p2 + def test_branched_invalidate_branch_to_parent(self): + c1 = self.engine.connect() + + c1_branch = c1.connect() + eq_(c1_branch.execute(select([1])).scalar(), 1) + + self.engine.test_shutdown() + + _assert_invalidated(c1_branch.execute, select([1])) + assert c1.invalidated + assert c1_branch.invalidated + + c1_branch._revalidate_connection() + assert not c1.invalidated + assert not 
c1_branch.invalidated + + def test_branched_invalidate_parent_to_branch(self): + c1 = self.engine.connect() + + c1_branch = c1.connect() + eq_(c1_branch.execute(select([1])).scalar(), 1) + + self.engine.test_shutdown() + + _assert_invalidated(c1.execute, select([1])) + assert c1.invalidated + assert c1_branch.invalidated + + c1._revalidate_connection() + assert not c1.invalidated + assert not c1_branch.invalidated + def test_ensure_is_disconnect_gets_connection(self): def is_disconnect(e, conn, cursor): # connection is still present -- cgit v1.2.1 From b89523f0b75e8d39bcbd8a5c07015e9df4ef5e2f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Sep 2014 16:25:26 -0400 Subject: - Fixed bug where a "branched" connection, that is the kind you get when you call :meth:`.Connection.connect`, would not share transaction status with the parent. The architecture of branching has been tweaked a bit so that the branched connection defers to the parent for all transactional status and operations. fixes #3190 --- doc/build/changelog/changelog_10.rst | 10 +++ lib/sqlalchemy/engine/base.py | 124 ++++++++++++++++++++++++----------- test/engine/test_execute.py | 11 ++++ test/engine/test_reconnect.py | 36 +++++++--- test/engine/test_transaction.py | 73 +++++++++++++++++++++ 5 files changed, 207 insertions(+), 47 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index a4f3dd6e5..4d5ab1f06 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -31,6 +31,16 @@ a bit so that the branched connection defers to the parent for all invalidation status and operations. + .. change:: + :tags: bug, sql, engine + :tickets: 3190 + + Fixed bug where a "branched" connection, that is the kind you get + when you call :meth:`.Connection.connect`, would not share transaction + status with the parent. 
The architecture of branching has been tweaked + a bit so that the branched connection defers to the parent for + all transactional status and operations. + .. change:: :tags: bug, declarative :tickets: 2670 diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index ec7aed1c3..05bb1f4e5 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -57,29 +57,35 @@ class Connection(Connectable): """ self.engine = engine self.dialect = engine.dialect - self.__connection = connection or engine.raw_connection() - self.__transaction = None - self.should_close_with_result = close_with_result - self.__savepoint_seq = 0 self.__branch_from = _branch_from self.__branch = _branch_from is not None - self.__invalid = False - self.__can_reconnect = True - if _dispatch: + + if _branch_from: + self.__connection = connection + self._execution_options = _execution_options + self._echo = _branch_from._echo + self.should_close_with_result = False self.dispatch = _dispatch - elif _has_events is None: - # if _has_events is sent explicitly as False, - # then don't join the dispatch of the engine; we don't - # want to handle any of the engine's events in that case. 
- self.dispatch = self.dispatch._join(engine.dispatch) - self._has_events = _has_events or ( - _has_events is None and engine._has_events) - - self._echo = self.engine._should_log_info() - if _execution_options: - self._execution_options =\ - engine._execution_options.union(_execution_options) + self._has_events = _branch_from._has_events else: + self.__connection = connection \ + if connection is not None else engine.raw_connection() + self.__transaction = None + self.__savepoint_seq = 0 + self.should_close_with_result = close_with_result + self.__invalid = False + self.__can_reconnect = True + self._echo = self.engine._should_log_info() + + if _has_events is None: + # if _has_events is sent explicitly as False, + # then don't join the dispatch of the engine; we don't + # want to handle any of the engine's events in that case. + self.dispatch = self.dispatch._join(engine.dispatch) + self._has_events = _has_events or ( + _has_events is None and engine._has_events) + + assert not _execution_options self._execution_options = engine._execution_options if self._has_events or self.engine._has_events: @@ -90,8 +96,16 @@ class Connection(Connectable): engine and connection; but does not have close_with_result enabled, and also whose close() method does nothing. - This is used to execute "sub" statements within a single execution, - usually an INSERT statement. + The Core uses this very sparingly, only in the case of + custom SQL default functions that are to be INSERTed as the + primary key of a row where we need to get the value back, so we have + to invoke it distinctly - this is a very uncommon case. + + Userland code accesses _branch() when the connect() or + contextual_connect() methods are called. The branched connection + acts as much as possible like the parent, except that it stays + connected when a close() event occurs. 
+ """ if self.__branch_from: return self.__branch_from._branch() @@ -100,6 +114,7 @@ class Connection(Connectable): self.engine, self.__connection, _branch_from=self, + _execution_options=self._execution_options, _has_events=self._has_events, _dispatch=self.dispatch) @@ -108,7 +123,10 @@ class Connection(Connectable): """return the 'root' connection. Returns 'self' if this connection is not a branch, else - returns the root connection from which we ultimately branched.""" + returns the root connection from which we ultimately branched. + + """ + if self.__branch_from: return self.__branch_from else: @@ -232,7 +250,7 @@ class Connection(Connectable): """Return True if this connection is closed.""" return '_Connection__connection' not in self.__dict__ \ - and not self._root.__can_reconnect + and not self.__can_reconnect @property def invalidated(self): @@ -251,7 +269,7 @@ class Connection(Connectable): def _revalidate_connection(self): if self.__branch_from: - return self._root._revalidate_connection() + return self.__branch_from._revalidate_connection() if self.__can_reconnect and self.__invalid: if self.__transaction is not None: @@ -360,9 +378,6 @@ class Connection(Connectable): :ref:`pool_connection_invalidation` """ - if self.__branch_from: - self._root.invalidate() - return if self.invalidated: return @@ -370,10 +385,10 @@ class Connection(Connectable): if self.closed: raise exc.ResourceClosedError("This Connection is closed") - if self._connection_is_valid: - self.__connection.invalidate(exception) - del self.__connection - self.__invalid = True + if self._root._connection_is_valid: + self._root.__connection.invalidate(exception) + del self._root.__connection + self._root.__invalid = True def detach(self): """Detach the underlying DB-API connection from its connection pool. @@ -436,6 +451,8 @@ class Connection(Connectable): :class:`.Engine`. 
""" + if self.__branch_from: + return self.__branch_from.begin() if self.__transaction is None: self.__transaction = RootTransaction(self) @@ -457,6 +474,9 @@ class Connection(Connectable): See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ + if self.__branch_from: + return self.__branch_from.begin_nested() + if self.__transaction is None: self.__transaction = RootTransaction(self) else: @@ -480,6 +500,9 @@ class Connection(Connectable): """ + if self.__branch_from: + return self.__branch_from.begin_twophase(xid=xid) + if self.__transaction is not None: raise exc.InvalidRequestError( "Cannot start a two phase transaction when a transaction " @@ -500,10 +523,11 @@ class Connection(Connectable): def in_transaction(self): """Return True if a transaction is in progress.""" - - return self.__transaction is not None + return self._root.__transaction is not None def _begin_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN (implicit)") @@ -518,6 +542,8 @@ class Connection(Connectable): self._handle_dbapi_exception(e, None, None, None, None) def _rollback_impl(self): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback(self) @@ -537,6 +563,8 @@ class Connection(Connectable): self.__transaction = None def _commit_impl(self, autocommit=False): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit(self) @@ -553,6 +581,8 @@ class Connection(Connectable): self.__transaction = None def _savepoint_impl(self, name=None): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.savepoint(self, name) @@ -564,6 +594,8 @@ class Connection(Connectable): return name def _rollback_to_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_savepoint(self, name, context) @@ -572,6 +604,8 @@ 
class Connection(Connectable): self.__transaction = context def _release_savepoint_impl(self, name, context): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.release_savepoint(self, name, context) @@ -580,6 +614,8 @@ class Connection(Connectable): self.__transaction = context def _begin_twophase_impl(self, transaction): + assert not self.__branch_from + if self._echo: self.engine.logger.info("BEGIN TWOPHASE (implicit)") if self._has_events or self.engine._has_events: @@ -592,6 +628,8 @@ class Connection(Connectable): self.connection._reset_agent = transaction def _prepare_twophase_impl(self, xid): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.prepare_twophase(self, xid) @@ -600,6 +638,8 @@ class Connection(Connectable): self.engine.dialect.do_prepare_twophase(self, xid) def _rollback_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.rollback_twophase(self, xid, is_prepared) @@ -616,6 +656,8 @@ class Connection(Connectable): self.__transaction = None def _commit_twophase_impl(self, xid, is_prepared): + assert not self.__branch_from + if self._has_events or self.engine._has_events: self.dispatch.commit_twophase(self, xid, is_prepared) @@ -653,13 +695,21 @@ class Connection(Connectable): and will allow no further operations. 
""" + if self.__branch_from: + try: + del self.__connection + except AttributeError: + pass + finally: + self.__can_reconnect = False + return try: conn = self.__connection except AttributeError: pass else: - if not self.__branch: - conn.close() + + conn.close() if conn._reset_agent is self.__transaction: conn._reset_agent = None @@ -1014,8 +1064,8 @@ class Connection(Connectable): result.rowcount result.close(_autoclose_connection=False) - if self.__transaction is None and context.should_autocommit: - self._commit_impl(autocommit=True) + if context.should_autocommit and self._root.__transaction is None: + self._root._commit_impl(autocommit=True) if result.closed and self.should_close_with_result: self.close() diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index e14a4fd2a..219a145c6 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -982,6 +982,17 @@ class ExecutionOptionsTest(fixtures.TestBase): eq_(c1._execution_options, {"foo": "bar"}) eq_(c2._execution_options, {"foo": "bar", "bat": "hoho"}) + def test_branched_connection_execution_options(self): + engine = testing_engine("sqlite://") + + conn = engine.connect() + c2 = conn.execution_options(foo="bar") + c2_branch = c2.connect() + eq_( + c2_branch._execution_options, + {"foo": "bar"} + ) + class AlternateResultProxyTest(fixtures.TestBase): __requires__ = ('sqlite', ) diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index 26a607301..4500ada6a 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -8,7 +8,7 @@ from sqlalchemy import testing from sqlalchemy.testing import engines from sqlalchemy.testing import fixtures from sqlalchemy.testing.engines import testing_engine -from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.mock import Mock, call, patch class MockError(Exception): @@ -507,18 +507,21 @@ class RealReconnectTest(fixtures.TestBase): def 
test_branched_invalidate_branch_to_parent(self): c1 = self.engine.connect() - c1_branch = c1.connect() - eq_(c1_branch.execute(select([1])).scalar(), 1) + with patch.object(self.engine.pool, "logger") as logger: + c1_branch = c1.connect() + eq_(c1_branch.execute(select([1])).scalar(), 1) - self.engine.test_shutdown() + self.engine.test_shutdown() - _assert_invalidated(c1_branch.execute, select([1])) - assert c1.invalidated - assert c1_branch.invalidated + _assert_invalidated(c1_branch.execute, select([1])) + assert c1.invalidated + assert c1_branch.invalidated - c1_branch._revalidate_connection() - assert not c1.invalidated - assert not c1_branch.invalidated + c1_branch._revalidate_connection() + assert not c1.invalidated + assert not c1_branch.invalidated + + assert "Invalidate connection" in logger.mock_calls[0][1][0] def test_branched_invalidate_parent_to_branch(self): c1 = self.engine.connect() @@ -536,6 +539,19 @@ class RealReconnectTest(fixtures.TestBase): assert not c1.invalidated assert not c1_branch.invalidated + def test_branch_invalidate_state(self): + c1 = self.engine.connect() + + c1_branch = c1.connect() + + eq_(c1_branch.execute(select([1])).scalar(), 1) + + self.engine.test_shutdown() + + _assert_invalidated(c1_branch.execute, select([1])) + assert not c1_branch.closed + assert not c1_branch._connection_is_valid + def test_ensure_is_disconnect_gets_connection(self): def is_disconnect(e, conn, cursor): # connection is still present diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index d921e9ead..fbaf01db7 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -133,6 +133,79 @@ class TransactionTest(fixtures.TestBase): finally: connection.close() + def test_branch_nested_rollback(self): + connection = testing.db.connect() + try: + connection.begin() + branched = connection.connect() + assert branched.in_transaction() + branched.execute(users.insert(), user_id=1, user_name='user1') + nested 
= branched.begin() + branched.execute(users.insert(), user_id=2, user_name='user2') + nested.rollback() + assert not connection.in_transaction() + eq_(connection.scalar("select count(*) from query_users"), 0) + + finally: + connection.close() + + def test_branch_orig_rollback(self): + connection = testing.db.connect() + try: + branched = connection.connect() + branched.execute(users.insert(), user_id=1, user_name='user1') + nested = branched.begin() + assert branched.in_transaction() + branched.execute(users.insert(), user_id=2, user_name='user2') + nested.rollback() + eq_(connection.scalar("select count(*) from query_users"), 1) + + finally: + connection.close() + + def test_branch_autocommit(self): + connection = testing.db.connect() + try: + branched = connection.connect() + branched.execute(users.insert(), user_id=1, user_name='user1') + finally: + connection.close() + eq_(testing.db.scalar("select count(*) from query_users"), 1) + + @testing.requires.savepoints + def test_branch_savepoint_rollback(self): + connection = testing.db.connect() + try: + trans = connection.begin() + branched = connection.connect() + assert branched.in_transaction() + branched.execute(users.insert(), user_id=1, user_name='user1') + nested = branched.begin_nested() + branched.execute(users.insert(), user_id=2, user_name='user2') + nested.rollback() + assert connection.in_transaction() + trans.commit() + eq_(connection.scalar("select count(*) from query_users"), 1) + + finally: + connection.close() + + @testing.requires.two_phase_transactions + def test_branch_twophase_rollback(self): + connection = testing.db.connect() + try: + branched = connection.connect() + assert not branched.in_transaction() + branched.execute(users.insert(), user_id=1, user_name='user1') + nested = branched.begin_twophase() + branched.execute(users.insert(), user_id=2, user_name='user2') + nested.rollback() + assert not connection.in_transaction() + eq_(connection.scalar("select count(*) from query_users"), 1) 
+ + finally: + connection.close() + def test_retains_through_options(self): connection = testing.db.connect() try: -- cgit v1.2.1 From 573877b4bc4fabcdae8b22e5257fc80de478a507 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 26 Sep 2014 21:41:15 -0400 Subject: - repair autorollback for branches --- lib/sqlalchemy/engine/base.py | 4 ++-- test/engine/test_transaction.py | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 05bb1f4e5..e5feda138 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -673,8 +673,8 @@ class Connection(Connectable): self.__transaction = None def _autorollback(self): - if not self.in_transaction(): - self._rollback_impl() + if not self._root.in_transaction(): + self._root._rollback_impl() def close(self): """Close this :class:`.Connection`. diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index fbaf01db7..b3b17e75a 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -149,6 +149,18 @@ class TransactionTest(fixtures.TestBase): finally: connection.close() + def test_branch_autorollback(self): + connection = testing.db.connect() + try: + branched = connection.connect() + branched.execute(users.insert(), user_id=1, user_name='user1') + try: + branched.execute(users.insert(), user_id=1, user_name='user1') + except exc.DBAPIError: + pass + finally: + connection.close() + def test_branch_orig_rollback(self): connection = testing.db.connect() try: -- cgit v1.2.1 From b275ac099fa2ef9e5da97b5b710f7fc694016012 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Sep 2014 16:12:04 -0400 Subject: - cyclomatic complexity; break up reflecttable(), goes from E to B --- lib/sqlalchemy/engine/reflection.py | 113 +++++++++++++++++++++++------------- 1 file changed, 74 insertions(+), 39 deletions(-) diff --git a/lib/sqlalchemy/engine/reflection.py 
b/lib/sqlalchemy/engine/reflection.py index c0a3240a5..838a5bdd2 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -489,55 +489,83 @@ class Inspector(object): for col_d in self.get_columns( table_name, schema, **table.dialect_kwargs): found_table = True - orig_name = col_d['name'] - table.dispatch.column_reflect(self, table, col_d) + self._reflect_column( + table, col_d, include_columns, + exclude_columns, cols_by_orig_name) - name = col_d['name'] - if include_columns and name not in include_columns: - continue - if exclude_columns and name in exclude_columns: - continue + if not found_table: + raise exc.NoSuchTableError(table.name) - coltype = col_d['type'] + self._reflect_pk( + table_name, schema, table, cols_by_orig_name, exclude_columns) - col_kw = dict( - (k, col_d[k]) - for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] - if k in col_d - ) + self._reflect_fk( + table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options) - colargs = [] - if col_d.get('default') is not None: - # the "default" value is assumed to be a literal SQL - # expression, so is wrapped in text() so that no quoting - # occurs on re-issuance. - colargs.append( - sa_schema.DefaultClause( - sql.text(col_d['default']), _reflected=True - ) - ) + self._reflect_indexes( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) - if 'sequence' in col_d: - # TODO: mssql and sybase are using this. 
- seq = col_d['sequence'] - sequence = sa_schema.Sequence(seq['name'], 1, 1) - if 'start' in seq: - sequence.start = seq['start'] - if 'increment' in seq: - sequence.increment = seq['increment'] - colargs.append(sequence) + def _reflect_column( + self, table, col_d, include_columns, + exclude_columns, cols_by_orig_name): - cols_by_orig_name[orig_name] = col = \ - sa_schema.Column(name, coltype, *colargs, **col_kw) + orig_name = col_d['name'] - if col.key in table.primary_key: - col.primary_key = True - table.append_column(col) + table.dispatch.column_reflect(self, table, col_d) - if not found_table: - raise exc.NoSuchTableError(table.name) + # fetch name again as column_reflect is allowed to + # change it + name = col_d['name'] + if (include_columns and name not in include_columns) \ + or (exclude_columns and name in exclude_columns): + return + coltype = col_d['type'] + + col_kw = dict( + (k, col_d[k]) + for k in ['nullable', 'autoincrement', 'quote', 'info', 'key'] + if k in col_d + ) + + colargs = [] + if col_d.get('default') is not None: + # the "default" value is assumed to be a literal SQL + # expression, so is wrapped in text() so that no quoting + # occurs on re-issuance. + colargs.append( + sa_schema.DefaultClause( + sql.text(col_d['default']), _reflected=True + ) + ) + + if 'sequence' in col_d: + self._reflect_col_sequence(col_d, colargs) + + cols_by_orig_name[orig_name] = col = \ + sa_schema.Column(name, coltype, *colargs, **col_kw) + + if col.key in table.primary_key: + col.primary_key = True + table.append_column(col) + + def _reflect_col_sequence(self, col_d, colargs): + if 'sequence' in col_d: + # TODO: mssql and sybase are using this. 
+ seq = col_d['sequence'] + sequence = sa_schema.Sequence(seq['name'], 1, 1) + if 'start' in seq: + sequence.start = seq['start'] + if 'increment' in seq: + sequence.increment = seq['increment'] + colargs.append(sequence) + + def _reflect_pk( + self, table_name, schema, table, + cols_by_orig_name, exclude_columns): pk_cons = self.get_pk_constraint( table_name, schema, **table.dialect_kwargs) if pk_cons: @@ -554,6 +582,9 @@ class Inspector(object): # its column collection table.primary_key._reload(pk_cols) + def _reflect_fk( + self, table_name, schema, table, cols_by_orig_name, + exclude_columns, reflection_options): fkeys = self.get_foreign_keys( table_name, schema, **table.dialect_kwargs) for fkey_d in fkeys: @@ -596,6 +627,10 @@ class Inspector(object): sa_schema.ForeignKeyConstraint(constrained_columns, refspec, conname, link_to_name=True, **options)) + + def _reflect_indexes( + self, table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options): # Indexes indexes = self.get_indexes(table_name, schema) for index_d in indexes: -- cgit v1.2.1 From 7dfd46ec442f5e84e62ce9b047837f464f41fa97 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Sep 2014 16:35:32 -0400 Subject: - cyclomatic complexity: instrument_class goes from E to an A --- lib/sqlalchemy/orm/collections.py | 48 ++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index c2754d58f..356a8a3b9 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -861,11 +861,24 @@ def _instrument_class(cls): "Can not instrument a built-in type. 
Use a " "subclass, even a trivial one.") + roles, methods = _locate_roles_and_methods(cls) + + _setup_canned_roles(cls, roles, methods) + + _assert_required_roles(cls, roles, methods) + + _set_collection_attributes(cls, roles, methods) + + +def _locate_roles_and_methods(cls): + """search for _sa_instrument_role-decorated methods in + method resolution order, assign to roles. + + """ + roles = {} methods = {} - # search for _sa_instrument_role-decorated methods in - # method resolution order, assign to roles for supercls in cls.__mro__: for name, method in vars(supercls).items(): if not util.callable(method): @@ -890,14 +903,19 @@ def _instrument_class(cls): assert op in ('fire_append_event', 'fire_remove_event') after = op if before: - methods[name] = before[0], before[1], after + methods[name] = before + (after, ) elif after: methods[name] = None, None, after + return roles, methods + - # see if this class has "canned" roles based on a known - # collection type (dict, set, list). Apply those roles - # as needed to the "roles" dictionary, and also - # prepare "decorator" methods +def _setup_canned_roles(cls, roles, methods): + """see if this class has "canned" roles based on a known + collection type (dict, set, list). 
Apply those roles + as needed to the "roles" dictionary, and also + prepare "decorator" methods + + """ collection_type = util.duck_type_collection(cls) if collection_type in __interfaces: canned_roles, decorators = __interfaces[collection_type] @@ -911,8 +929,12 @@ def _instrument_class(cls): not hasattr(fn, '_sa_instrumented')): setattr(cls, method, decorator(fn)) - # ensure all roles are present, and apply implicit instrumentation if - # needed + +def _assert_required_roles(cls, roles, methods): + """ensure all roles are present, and apply implicit instrumentation if + needed + + """ if 'appender' not in roles or not hasattr(cls, roles['appender']): raise sa_exc.ArgumentError( "Type %s must elect an appender method to be " @@ -934,8 +956,12 @@ def _instrument_class(cls): "Type %s must elect an iterator method to be " "a collection class" % cls.__name__) - # apply ad-hoc instrumentation from decorators, class-level defaults - # and implicit role declarations + +def _set_collection_attributes(cls, roles, methods): + """apply ad-hoc instrumentation from decorators, class-level defaults + and implicit role declarations + + """ for method_name, (before, argument, after) in methods.items(): setattr(cls, method_name, _instrument_membership_mutator(getattr(cls, method_name), -- cgit v1.2.1 From 7473ab42c4ad58e90c0324673814db3cd52abf84 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Sep 2014 16:46:23 -0400 Subject: - cyclomatic complexity: _join_condition goes from E to a B --- lib/sqlalchemy/sql/selectable.py | 97 ++++++++++++++++++++++------------------ 1 file changed, 54 insertions(+), 43 deletions(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 248048662..b4df87e54 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -746,6 +746,33 @@ class Join(FromClause): providing a "natural join". 
""" + constraints = cls._joincond_scan_left_right( + a, a_subset, b, consider_as_foreign_keys) + + if len(constraints) > 1: + cls._joincond_trim_constraints( + a, b, constraints, consider_as_foreign_keys) + + if len(constraints) == 0: + if isinstance(b, FromGrouping): + hint = " Perhaps you meant to convert the right side to a "\ + "subquery using alias()?" + else: + hint = "" + raise exc.NoForeignKeysError( + "Can't find any foreign key relationships " + "between '%s' and '%s'.%s" % + (a.description, b.description, hint)) + + crit = [(x == y) for x, y in list(constraints.values())[0]] + if len(crit) == 1: + return (crit[0]) + else: + return and_(*crit) + + @classmethod + def _joincond_scan_left_right( + cls, a, a_subset, b, consider_as_foreign_keys): constraints = collections.defaultdict(list) for left in (a_subset, a): @@ -780,57 +807,41 @@ class Join(FromClause): if nrte.table_name == b.name: raise else: - # this is totally covered. can't get - # coverage to mark it. continue if col is not None: constraints[fk.constraint].append((col, fk.parent)) if constraints: break + return constraints + @classmethod + def _joincond_trim_constraints( + cls, a, b, constraints, consider_as_foreign_keys): + # more than one constraint matched. narrow down the list + # to include just those FKCs that match exactly to + # "consider_as_foreign_keys". + if consider_as_foreign_keys: + for const in list(constraints): + if set(f.parent for f in const.elements) != set( + consider_as_foreign_keys): + del constraints[const] + + # if still multiple constraints, but + # they all refer to the exact same end result, use it. if len(constraints) > 1: - # more than one constraint matched. narrow down the list - # to include just those FKCs that match exactly to - # "consider_as_foreign_keys". 
- if consider_as_foreign_keys: - for const in list(constraints): - if set(f.parent for f in const.elements) != set( - consider_as_foreign_keys): - del constraints[const] - - # if still multiple constraints, but - # they all refer to the exact same end result, use it. - if len(constraints) > 1: - dedupe = set(tuple(crit) for crit in constraints.values()) - if len(dedupe) == 1: - key = list(constraints)[0] - constraints = {key: constraints[key]} - - if len(constraints) != 1: - raise exc.AmbiguousForeignKeysError( - "Can't determine join between '%s' and '%s'; " - "tables have more than one foreign key " - "constraint relationship between them. " - "Please specify the 'onclause' of this " - "join explicitly." % (a.description, b.description)) - - if len(constraints) == 0: - if isinstance(b, FromGrouping): - hint = " Perhaps you meant to convert the right side to a "\ - "subquery using alias()?" - else: - hint = "" - raise exc.NoForeignKeysError( - "Can't find any foreign key relationships " - "between '%s' and '%s'.%s" % - (a.description, b.description, hint)) - - crit = [(x == y) for x, y in list(constraints.values())[0]] - if len(crit) == 1: - return (crit[0]) - else: - return and_(*crit) + dedupe = set(tuple(crit) for crit in constraints.values()) + if len(dedupe) == 1: + key = list(constraints)[0] + constraints = {key: constraints[key]} + + if len(constraints) != 1: + raise exc.AmbiguousForeignKeysError( + "Can't determine join between '%s' and '%s'; " + "tables have more than one foreign key " + "constraint relationship between them. " + "Please specify the 'onclause' of this " + "join explicitly." % (a.description, b.description)) def select(self, whereclause=None, **kwargs): """Create a :class:`.Select` from this :class:`.Join`. 
-- cgit v1.2.1 From 3cc0cf0148ab0905e9903e282f7d1e1190d1c871 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Sep 2014 17:59:05 -0400 Subject: - cyclomatic complexity: _get_colparams() becomes sql.crud._get_crud_params, CC goes from F to D --- lib/sqlalchemy/sql/compiler.py | 459 ++++----------------------------------- lib/sqlalchemy/sql/crud.py | 473 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 511 insertions(+), 421 deletions(-) create mode 100644 lib/sqlalchemy/sql/crud.py diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index abda31358..0bdc60b8c 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -24,12 +24,10 @@ To generate user-defined SQL strings, see """ import re -from . import schema, sqltypes, operators, functions, \ - util as sql_util, visitors, elements, selectable, base +from . import schema, sqltypes, operators, functions, visitors, \ + elements, selectable, crud from .. import util, exc -import decimal import itertools -import operator RESERVED_WORDS = set([ 'all', 'analyse', 'analyze', 'and', 'any', 'array', @@ -64,17 +62,6 @@ BIND_TEMPLATES = { 'named': ":%(name)s" } -REQUIRED = util.symbol('REQUIRED', """ -Placeholder for the value within a :class:`.BindParameter` -which is required to be present when the statement is passed -to :meth:`.Connection.execute`. - -This symbol is typically used when a :func:`.expression.insert` -or :func:`.expression.update` statement is compiled without parameter -values present. 
- -""") - OPERATORS = { # binary @@ -725,7 +712,6 @@ class SQLCompiler(Compiled): for c in clauselist.clauses) if s) - def visit_case(self, clause, **kwargs): x = "CASE " if clause.value is not None: @@ -819,7 +805,8 @@ class SQLCompiler(Compiled): text += " GROUP BY " + group_by text += self.order_by_clause(cs, **kwargs) - text += (cs._limit_clause is not None or cs._offset_clause is not None) and \ + text += (cs._limit_clause is not None + or cs._offset_clause is not None) and \ self.limit_clause(cs) or "" if self.ctes and \ @@ -876,15 +863,15 @@ class SQLCompiler(Compiled): isinstance(binary.right, elements.BindParameter): kw['literal_binds'] = True - operator = binary.operator - disp = getattr(self, "visit_%s_binary" % operator.__name__, None) + operator_ = binary.operator + disp = getattr(self, "visit_%s_binary" % operator_.__name__, None) if disp: - return disp(binary, operator, **kw) + return disp(binary, operator_, **kw) else: try: - opstring = OPERATORS[operator] + opstring = OPERATORS[operator_] except KeyError: - raise exc.UnsupportedCompilationError(self, operator) + raise exc.UnsupportedCompilationError(self, operator_) else: return self._generate_generic_binary(binary, opstring, **kw) @@ -966,7 +953,7 @@ class SQLCompiler(Compiled): ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) if escape else '' - ) + ) def visit_notlike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) @@ -977,7 +964,7 @@ class SQLCompiler(Compiled): ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) if escape else '' - ) + ) def visit_ilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) @@ -988,7 +975,7 @@ class SQLCompiler(Compiled): ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) if escape else '' - ) + ) def visit_notilike_op_binary(self, binary, operator, **kw): escape = binary.modifiers.get("escape", None) @@ -999,7 +986,7 @@ class 
SQLCompiler(Compiled): ' ESCAPE ' + self.render_literal_value(escape, sqltypes.STRINGTYPE) if escape else '' - ) + ) def visit_between_op_binary(self, binary, operator, **kw): symmetric = binary.modifiers.get("symmetric", False) @@ -1720,9 +1707,9 @@ class SQLCompiler(Compiled): def visit_insert(self, insert_stmt, **kw): self.isinsert = True - colparams = self._get_colparams(insert_stmt, **kw) + crud_params = crud._get_crud_params(self, insert_stmt, **kw) - if not colparams and \ + if not crud_params and \ not self.dialect.supports_default_values and \ not self.dialect.supports_empty_insert: raise exc.CompileError("The '%s' dialect with current database " @@ -1737,9 +1724,9 @@ class SQLCompiler(Compiled): "version settings does not support " "in-place multirow inserts." % self.dialect.name) - colparams_single = colparams[0] + crud_params_single = crud_params[0] else: - colparams_single = colparams + crud_params_single = crud_params preparer = self.preparer supports_default_values = self.dialect.supports_default_values @@ -1770,9 +1757,9 @@ class SQLCompiler(Compiled): text += table_text - if colparams_single or not supports_default_values: + if crud_params_single or not supports_default_values: text += " (%s)" % ', '.join([preparer.format_column(c[0]) - for c in colparams_single]) + for c in crud_params_single]) if self.returning or insert_stmt._returning: self.returning = self.returning or insert_stmt._returning @@ -1784,20 +1771,20 @@ class SQLCompiler(Compiled): if insert_stmt.select is not None: text += " %s" % self.process(insert_stmt.select, **kw) - elif not colparams and supports_default_values: + elif not crud_params and supports_default_values: text += " DEFAULT VALUES" elif insert_stmt._has_multi_parameters: text += " VALUES %s" % ( ", ".join( "(%s)" % ( - ', '.join(c[1] for c in colparam_set) + ', '.join(c[1] for c in crud_param_set) ) - for colparam_set in colparams + for crud_param_set in crud_params ) ) else: text += " VALUES (%s)" % \ - ', 
'.join([c[1] for c in colparams]) + ', '.join([c[1] for c in crud_params]) if self.returning and not self.returning_precedes_values: text += " " + returning_clause @@ -1854,7 +1841,7 @@ class SQLCompiler(Compiled): table_text = self.update_tables_clause(update_stmt, update_stmt.table, extra_froms, **kw) - colparams = self._get_colparams(update_stmt, **kw) + crud_params = crud._get_crud_params(self, update_stmt, **kw) if update_stmt._hints: dialect_hints = dict([ @@ -1881,7 +1868,7 @@ class SQLCompiler(Compiled): text += ', '.join( c[0]._compiler_dispatch(self, include_table=include_table) + - '=' + c[1] for c in colparams + '=' + c[1] for c in crud_params ) if self.returning or update_stmt._returning: @@ -1917,380 +1904,9 @@ class SQLCompiler(Compiled): return text - def _create_crud_bind_param(self, col, value, required=False, name=None): - if name is None: - name = col.key - bindparam = elements.BindParameter(name, value, - type_=col.type, required=required) - bindparam._is_crud = True - return bindparam._compiler_dispatch(self) - @util.memoized_property def _key_getters_for_crud_column(self): - if self.isupdate and self.statement._extra_froms: - # when extra tables are present, refer to the columns - # in those extra tables as table-qualified, including in - # dictionaries and when rendering bind param names. - # the "main" table of the statement remains unqualified, - # allowing the most compatibility with a non-multi-table - # statement. 
- _et = set(self.statement._extra_froms) - - def _column_as_key(key): - str_key = elements._column_as_key(key) - if hasattr(key, 'table') and key.table in _et: - return (key.table.name, str_key) - else: - return str_key - - def _getattr_col_key(col): - if col.table in _et: - return (col.table.name, col.key) - else: - return col.key - - def _col_bind_name(col): - if col.table in _et: - return "%s_%s" % (col.table.name, col.key) - else: - return col.key - - else: - _column_as_key = elements._column_as_key - _getattr_col_key = _col_bind_name = operator.attrgetter("key") - - return _column_as_key, _getattr_col_key, _col_bind_name - - def _get_colparams(self, stmt, **kw): - """create a set of tuples representing column/string pairs for use - in an INSERT or UPDATE statement. - - Also generates the Compiled object's postfetch, prefetch, and - returning column collections, used for default handling and ultimately - populating the ResultProxy's prefetch_cols() and postfetch_cols() - collections. 
- - """ - - self.postfetch = [] - self.prefetch = [] - self.returning = [] - - # no parameters in the statement, no parameters in the - # compiled params - return binds for all columns - if self.column_keys is None and stmt.parameters is None: - return [ - (c, self._create_crud_bind_param(c, - None, required=True)) - for c in stmt.table.columns - ] - - if stmt._has_multi_parameters: - stmt_parameters = stmt.parameters[0] - else: - stmt_parameters = stmt.parameters - - # getters - these are normally just column.key, - # but in the case of mysql multi-table update, the rules for - # .key must conditionally take tablename into account - _column_as_key, _getattr_col_key, _col_bind_name = \ - self._key_getters_for_crud_column - - # if we have statement parameters - set defaults in the - # compiled params - if self.column_keys is None: - parameters = {} - else: - parameters = dict((_column_as_key(key), REQUIRED) - for key in self.column_keys - if not stmt_parameters or - key not in stmt_parameters) - - # create a list of column assignment clauses as tuples - values = [] - - if stmt_parameters is not None: - for k, v in stmt_parameters.items(): - colkey = _column_as_key(k) - if colkey is not None: - parameters.setdefault(colkey, v) - else: - # a non-Column expression on the left side; - # add it to values() in an "as-is" state, - # coercing right side to bound param - if elements._is_literal(v): - v = self.process( - elements.BindParameter(None, v, type_=k.type), - **kw) - else: - v = self.process(v.self_group(), **kw) - - values.append((k, v)) - - need_pks = self.isinsert and \ - not self.inline and \ - not stmt._returning and \ - not stmt._has_multi_parameters - - implicit_returning = need_pks and \ - self.dialect.implicit_returning and \ - stmt.table.implicit_returning - - if self.isinsert: - implicit_return_defaults = (implicit_returning and - stmt._return_defaults) - elif self.isupdate: - implicit_return_defaults = (self.dialect.implicit_returning and - 
stmt.table.implicit_returning and - stmt._return_defaults) - else: - implicit_return_defaults = False - - if implicit_return_defaults: - if stmt._return_defaults is True: - implicit_return_defaults = set(stmt.table.c) - else: - implicit_return_defaults = set(stmt._return_defaults) - - postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid - - check_columns = {} - - # special logic that only occurs for multi-table UPDATE - # statements - if self.isupdate and stmt._extra_froms and stmt_parameters: - normalized_params = dict( - (elements._clause_element_as_expr(c), param) - for c, param in stmt_parameters.items() - ) - affected_tables = set() - for t in stmt._extra_froms: - for c in t.c: - if c in normalized_params: - affected_tables.add(t) - check_columns[_getattr_col_key(c)] = c - value = normalized_params[c] - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c)) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - # determine tables which are actually - # to be updated - process onupdate and - # server_onupdate for these - for t in affected_tables: - for c in t.c: - if c in normalized_params: - continue - elif (c.onupdate is not None and not - c.onupdate.is_sequence): - if c.onupdate.is_clause_element: - values.append( - (c, self.process( - c.onupdate.arg.self_group(), - **kw) - ) - ) - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param( - c, None, name=_col_bind_name(c) - ) - ) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - self.postfetch.append(c) - - if self.isinsert and stmt.select_names: - # for an insert from select, we can only use names that - # are given, so only select for those names. 
- cols = (stmt.table.c[_column_as_key(name)] - for name in stmt.select_names) - else: - # iterate through all table columns to maintain - # ordering, even for those cols that aren't included - cols = stmt.table.columns - - for c in cols: - col_key = _getattr_col_key(c) - if col_key in parameters and col_key not in check_columns: - value = parameters.pop(col_key) - if elements._is_literal(value): - value = self._create_crud_bind_param( - c, value, required=value is REQUIRED, - name=_col_bind_name(c) - if not stmt._has_multi_parameters - else "%s_0" % _col_bind_name(c) - ) - else: - if isinstance(value, elements.BindParameter) and \ - value.type._isnull: - value = value._clone() - value.type = c.type - - if c.primary_key and implicit_returning: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - value = self.process(value.self_group(), **kw) - else: - self.postfetch.append(c) - value = self.process(value.self_group(), **kw) - values.append((c, value)) - - elif self.isinsert: - if c.primary_key and \ - need_pks and \ - ( - implicit_returning or - not postfetch_lastrowid or - c is not stmt.table._autoincrement_column - ): - - if implicit_returning: - if c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or - not self.dialect.sequences_optional): - proc = self.process(c.default, **kw) - values.append((c, proc)) - self.returning.append(c) - elif c.default.is_clause_element: - values.append( - (c, self.process( - c.default.arg.self_group(), **kw)) - ) - self.returning.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - else: - self.returning.append(c) - else: - if ( - (c.default is not None and - (not c.default.is_sequence or - self.dialect.supports_sequences)) or - c is stmt.table._autoincrement_column and - 
(self.dialect.supports_sequences or - self.dialect. - preexecute_autoincrement_sequences) - ): - - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - - self.prefetch.append(c) - - elif c.default is not None: - if c.default.is_sequence: - if self.dialect.supports_sequences and \ - (not c.default.optional or - not self.dialect.sequences_optional): - proc = self.process(c.default, **kw) - values.append((c, proc)) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif c.default.is_clause_element: - values.append( - (c, self.process( - c.default.arg.self_group(), **kw)) - ) - - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - # don't add primary key column to postfetch - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_default is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - elif not c.primary_key: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - elif self.isupdate: - if c.onupdate is not None and not c.onupdate.is_sequence: - if c.onupdate.is_clause_element: - values.append( - (c, self.process( - c.onupdate.arg.self_group(), **kw)) - ) - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - else: - values.append( - (c, self._create_crud_bind_param(c, None)) - ) - self.prefetch.append(c) - elif c.server_onupdate is not None: - if implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - else: - self.postfetch.append(c) - elif implicit_return_defaults and \ - c in implicit_return_defaults: - self.returning.append(c) - - if parameters and 
stmt_parameters: - check = set(parameters).intersection( - _column_as_key(k) for k in stmt.parameters - ).difference(check_columns) - if check: - raise exc.CompileError( - "Unconsumed column names: %s" % - (", ".join("%s" % c for c in check)) - ) - - if stmt._has_multi_parameters: - values_0 = values - values = [values] - - values.extend( - [ - ( - c, - (self._create_crud_bind_param( - c, row[c.key], - name="%s_%d" % (c.key, i + 1) - ) if elements._is_literal(row[c.key]) - else self.process( - row[c.key].self_group(), **kw)) - if c.key in row else param - ) - for (c, param) in values_0 - ] - for i, row in enumerate(stmt.parameters[1:]) - ) - - return values + return crud._key_getters_for_crud_column(self) def visit_delete(self, delete_stmt, **kw): self.stack.append({'correlate_froms': set([delete_stmt.table]), @@ -2474,17 +2090,18 @@ class DDLCompiler(Compiled): constraints.extend([c for c in table._sorted_constraints if c is not table.primary_key]) - return ", \n\t".join(p for p in - (self.process(constraint) - for constraint in constraints - if ( - constraint._create_rule is None or - constraint._create_rule(self)) - and ( - not self.dialect.supports_alter or - not getattr(constraint, 'use_alter', False) - )) if p is not None - ) + return ", \n\t".join( + p for p in + (self.process(constraint) + for constraint in constraints + if ( + constraint._create_rule is None or + constraint._create_rule(self)) + and ( + not self.dialect.supports_alter or + not getattr(constraint, 'use_alter', False) + )) if p is not None + ) def visit_drop_table(self, drop): return "\nDROP TABLE " + self.preparer.format_table(drop.element) diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py new file mode 100644 index 000000000..1c1f661d2 --- /dev/null +++ b/lib/sqlalchemy/sql/crud.py @@ -0,0 +1,473 @@ +# sql/crud.py +# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: 
http://www.opensource.org/licenses/mit-license.php + +"""Functions used by compiler.py to determine the parameters rendered +within INSERT and UPDATE statements. + +""" +from .. import util +from .. import exc +from . import elements +import operator + +REQUIRED = util.symbol('REQUIRED', """ +Placeholder for the value within a :class:`.BindParameter` +which is required to be present when the statement is passed +to :meth:`.Connection.execute`. + +This symbol is typically used when a :func:`.expression.insert` +or :func:`.expression.update` statement is compiled without parameter +values present. + +""") + + +def _get_crud_params(compiler, stmt, **kw): + """create a set of tuples representing column/string pairs for use + in an INSERT or UPDATE statement. + + Also generates the Compiled object's postfetch, prefetch, and + returning column collections, used for default handling and ultimately + populating the ResultProxy's prefetch_cols() and postfetch_cols() + collections. + + """ + + compiler.postfetch = [] + compiler.prefetch = [] + compiler.returning = [] + + # no parameters in the statement, no parameters in the + # compiled params - return binds for all columns + if compiler.column_keys is None and stmt.parameters is None: + return [ + (c, _create_bind_param( + compiler, c, None, required=True)) + for c in stmt.table.columns + ] + + if stmt._has_multi_parameters: + stmt_parameters = stmt.parameters[0] + else: + stmt_parameters = stmt.parameters + + # getters - these are normally just column.key, + # but in the case of mysql multi-table update, the rules for + # .key must conditionally take tablename into account + _column_as_key, _getattr_col_key, _col_bind_name = \ + _key_getters_for_crud_column(compiler) + + # if we have statement parameters - set defaults in the + # compiled params + if compiler.column_keys is None: + parameters = {} + else: + parameters = dict((_column_as_key(key), REQUIRED) + for key in compiler.column_keys + if not stmt_parameters or + 
key not in stmt_parameters) + + # create a list of column assignment clauses as tuples + values = [] + + if stmt_parameters is not None: + _get_stmt_parameters_params( + compiler, + parameters, stmt_parameters, _column_as_key, values, kw) + + check_columns = {} + + # special logic that only occurs for multi-table UPDATE + # statements + if compiler.isupdate and stmt._extra_froms and stmt_parameters: + _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + _col_bind_name, _getattr_col_key, values, kw) + + if compiler.isinsert and stmt.select_names: + # for an insert from select, we can only use names that + # are given, so only select for those names. + cols = (stmt.table.c[_column_as_key(name)] + for name in stmt.select_names) + else: + # iterate through all table columns to maintain + # ordering, even for those cols that aren't included + cols = stmt.table.columns + + _scan_cols( + compiler, stmt, cols, parameters, + _getattr_col_key, _col_bind_name, check_columns, values, kw) + + if parameters and stmt_parameters: + check = set(parameters).intersection( + _column_as_key(k) for k in stmt.parameters + ).difference(check_columns) + if check: + raise exc.CompileError( + "Unconsumed column names: %s" % + (", ".join("%s" % c for c in check)) + ) + + if stmt._has_multi_parameters: + values = _extend_values_for_multiparams(compiler, stmt, values, kw) + + return values + + +def _create_bind_param(compiler, col, value, required=False, name=None): + if name is None: + name = col.key + bindparam = elements.BindParameter(name, value, + type_=col.type, required=required) + bindparam._is_crud = True + return bindparam._compiler_dispatch(compiler) + +def _key_getters_for_crud_column(compiler): + if compiler.isupdate and compiler.statement._extra_froms: + # when extra tables are present, refer to the columns + # in those extra tables as table-qualified, including in + # dictionaries and when rendering bind param names. 
+ # the "main" table of the statement remains unqualified, + # allowing the most compatibility with a non-multi-table + # statement. + _et = set(compiler.statement._extra_froms) + + def _column_as_key(key): + str_key = elements._column_as_key(key) + if hasattr(key, 'table') and key.table in _et: + return (key.table.name, str_key) + else: + return str_key + + def _getattr_col_key(col): + if col.table in _et: + return (col.table.name, col.key) + else: + return col.key + + def _col_bind_name(col): + if col.table in _et: + return "%s_%s" % (col.table.name, col.key) + else: + return col.key + + else: + _column_as_key = elements._column_as_key + _getattr_col_key = _col_bind_name = operator.attrgetter("key") + + return _column_as_key, _getattr_col_key, _col_bind_name + + +def _scan_cols( + compiler, stmt, cols, parameters, _getattr_col_key, + _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + + _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw) + + elif compiler.isinsert: + if c.primary_key and \ + need_pks and \ + ( + implicit_returning or + not postfetch_lastrowid or + c is not stmt.table._autoincrement_column + ): + + if implicit_returning: + _append_param_insert_pk_returning( + compiler, stmt, c, values, kw) + else: + _append_param_insert_pk(compiler, stmt, c, values, kw) + + elif c.default is not None: + + _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw) + + elif c.server_default is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + c in 
implicit_return_defaults: + compiler.returning.append(c) + + elif compiler.isupdate: + _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw) + + +def _append_param_parameter( + compiler, stmt, c, col_key, parameters, _col_bind_name, + implicit_returning, implicit_return_defaults, values, kw): + value = parameters.pop(col_key) + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c) + if not stmt._has_multi_parameters + else "%s_0" % _col_bind_name(c) + ) + else: + if isinstance(value, elements.BindParameter) and \ + value.type._isnull: + value = value._clone() + value.type = c.type + + if c.primary_key and implicit_returning: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + value = compiler.process(value.self_group(), **kw) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + + +def _append_param_insert_pk_returning(compiler, stmt, c, values, kw): + if c.default is not None: + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + compiler.returning.append(c) + elif c.default.is_clause_element: + values.append( + (c, compiler.process( + c.default.arg.self_group(), **kw)) + ) + compiler.returning.append(c) + else: + values.append( + (c, _create_bind_param(compiler, c, None)) + ) + compiler.prefetch.append(c) + else: + compiler.returning.append(c) + + +def _append_param_insert_pk(compiler, stmt, c, values, kw): + if ( + (c.default is not None and + (not c.default.is_sequence or + compiler.dialect.supports_sequences)) or + c is stmt.table._autoincrement_column and + 
(compiler.dialect.supports_sequences or + compiler.dialect. + preexecute_autoincrement_sequences) + ): + values.append( + (c, _create_bind_param(compiler, c, None)) + ) + + compiler.prefetch.append(c) + + +def _append_param_insert_hasdefault( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = compiler.process(c.default, **kw) + values.append((c, proc)) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + compiler.postfetch.append(c) + elif c.default.is_clause_element: + values.append( + (c, compiler.process( + c.default.arg.self_group(), **kw)) + ) + + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + elif not c.primary_key: + # don't add primary key column to postfetch + compiler.postfetch.append(c) + else: + values.append( + (c, _create_bind_param(compiler, c, None)) + ) + compiler.prefetch.append(c) + + +def _append_param_update( + compiler, stmt, c, implicit_return_defaults, values, kw): + + if c.onupdate is not None and not c.onupdate.is_sequence: + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), **kw)) + ) + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + else: + values.append( + (c, _create_bind_param(compiler, c, None)) + ) + compiler.prefetch.append(c) + elif c.server_onupdate is not None: + if implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + else: + compiler.postfetch.append(c) + elif implicit_return_defaults and \ + c in implicit_return_defaults: + compiler.returning.append(c) + + +def _get_multitable_params( + compiler, stmt, stmt_parameters, check_columns, + 
_col_bind_name, _getattr_col_key, values, kw): + + normalized_params = dict( + (elements._clause_element_as_expr(c), param) + for c, param in stmt_parameters.items() + ) + affected_tables = set() + for t in stmt._extra_froms: + for c in t.c: + if c in normalized_params: + affected_tables.add(t) + check_columns[_getattr_col_key(c)] = c + value = normalized_params[c] + if elements._is_literal(value): + value = _create_bind_param( + compiler, c, value, required=value is REQUIRED, + name=_col_bind_name(c)) + else: + compiler.postfetch.append(c) + value = compiler.process(value.self_group(), **kw) + values.append((c, value)) + # determine tables which are actually to be updated - process onupdate + # and server_onupdate for these + for t in affected_tables: + for c in t.c: + if c in normalized_params: + continue + elif (c.onupdate is not None and not + c.onupdate.is_sequence): + if c.onupdate.is_clause_element: + values.append( + (c, compiler.process( + c.onupdate.arg.self_group(), + **kw) + ) + ) + compiler.postfetch.append(c) + else: + values.append( + (c, _create_bind_param( + compiler, c, None, name=_col_bind_name(c) + ) + ) + ) + compiler.prefetch.append(c) + elif c.server_onupdate is not None: + compiler.postfetch.append(c) + + +def _extend_values_for_multiparams(compiler, stmt, values, kw): + values_0 = values + values = [values] + + values.extend( + [ + ( + c, + (_create_bind_param( + compiler, c, row[c.key], + name="%s_%d" % (c.key, i + 1) + ) if elements._is_literal(row[c.key]) + else compiler.process( + row[c.key].self_group(), **kw)) + if c.key in row else param + ) + for (c, param) in values_0 + ] + for i, row in enumerate(stmt.parameters[1:]) + ) + return values + + +def _get_stmt_parameters_params( + compiler, parameters, stmt_parameters, _column_as_key, values, kw): + for k, v in stmt_parameters.items(): + colkey = _column_as_key(k) + if colkey is not None: + parameters.setdefault(colkey, v) + else: + # a non-Column expression on the left side; + # add 
it to values() in an "as-is" state, + # coercing right side to bound param + if elements._is_literal(v): + v = compiler.process( + elements.BindParameter(None, v, type_=k.type), + **kw) + else: + v = compiler.process(v.self_group(), **kw) + + values.append((k, v)) + + +def _get_returning_modifiers(compiler, stmt): + need_pks = compiler.isinsert and \ + not compiler.inline and \ + not stmt._returning and \ + not stmt._has_multi_parameters + + implicit_returning = need_pks and \ + compiler.dialect.implicit_returning and \ + stmt.table.implicit_returning + + if compiler.isinsert: + implicit_return_defaults = (implicit_returning and + stmt._return_defaults) + elif compiler.isupdate: + implicit_return_defaults = (compiler.dialect.implicit_returning and + stmt.table.implicit_returning and + stmt._return_defaults) + else: + implicit_return_defaults = False + + if implicit_return_defaults: + if stmt._return_defaults is True: + implicit_return_defaults = set(stmt.table.c) + else: + implicit_return_defaults = set(stmt._return_defaults) + + postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid + + return need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid -- cgit v1.2.1 From 9a10b8a7f55d78620a77f8345e64718baec5ed91 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 27 Sep 2014 18:13:50 -0400 Subject: - cyclomatic complexity; break up visit_select, goes from F to D --- lib/sqlalchemy/sql/compiler.py | 121 +++++++++++++++++++++++------------------ 1 file changed, 69 insertions(+), 52 deletions(-) diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 0bdc60b8c..18b4d4cfc 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1491,29 +1491,7 @@ class SQLCompiler(Compiled): select, transformed_select) return text - correlate_froms = entry['correlate_froms'] - asfrom_froms = entry['asfrom_froms'] - - if asfrom: - froms = select._get_display_froms( - 
explicit_correlate_froms=correlate_froms.difference( - asfrom_froms), - implicit_correlate_froms=()) - else: - froms = select._get_display_froms( - explicit_correlate_froms=correlate_froms, - implicit_correlate_froms=asfrom_froms) - - new_correlate_froms = set(selectable._from_objects(*froms)) - all_correlate_froms = new_correlate_froms.union(correlate_froms) - - new_entry = { - 'asfrom_froms': new_correlate_froms, - 'iswrapper': iswrapper, - 'correlate_froms': all_correlate_froms, - 'selectable': select, - } - self.stack.append(new_entry) + froms = self._setup_select_stack(select, entry, asfrom, iswrapper) column_clause_args = kwargs.copy() column_clause_args.update({ @@ -1524,18 +1502,11 @@ class SQLCompiler(Compiled): text = "SELECT " # we're off to a good start ! if select._hints: - byfrom = dict([ - (from_, hinttext % { - 'name': from_._compiler_dispatch( - self, ashint=True) - }) - for (from_, dialect), hinttext in - select._hints.items() - if dialect in ('*', self.dialect.name) - ]) - hint_text = self.get_select_hint_text(byfrom) + hint_text, byfrom = self._setup_select_hints(select) if hint_text: text += hint_text + " " + else: + byfrom = None if select._prefixes: text += self._generate_prefixes( @@ -1556,6 +1527,70 @@ class SQLCompiler(Compiled): if c is not None ] + text = self._compose_select_body( + text, select, inner_columns, froms, byfrom, kwargs) + + if select._statement_hints: + per_dialect = [ + ht for (dialect_name, ht) + in select._statement_hints + if dialect_name in ('*', self.dialect.name) + ] + if per_dialect: + text += " " + self.get_statement_hint_text(per_dialect) + + if self.ctes and \ + compound_index == 0 and toplevel: + text = self._render_cte_clause() + text + + self.stack.pop(-1) + + if asfrom and parens: + return "(" + text + ")" + else: + return text + + def _setup_select_hints(self, select): + byfrom = dict([ + (from_, hinttext % { + 'name': from_._compiler_dispatch( + self, ashint=True) + }) + for (from_, dialect), hinttext in + 
select._hints.items() + if dialect in ('*', self.dialect.name) + ]) + hint_text = self.get_select_hint_text(byfrom) + return hint_text, byfrom + + def _setup_select_stack(self, select, entry, asfrom, iswrapper): + correlate_froms = entry['correlate_froms'] + asfrom_froms = entry['asfrom_froms'] + + if asfrom: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms.difference( + asfrom_froms), + implicit_correlate_froms=()) + else: + froms = select._get_display_froms( + explicit_correlate_froms=correlate_froms, + implicit_correlate_froms=asfrom_froms) + + new_correlate_froms = set(selectable._from_objects(*froms)) + all_correlate_froms = new_correlate_froms.union(correlate_froms) + + new_entry = { + 'asfrom_froms': new_correlate_froms, + 'iswrapper': iswrapper, + 'correlate_froms': all_correlate_froms, + 'selectable': select, + } + self.stack.append(new_entry) + return froms + + def _compose_select_body( + self, text, select, inner_columns, froms, byfrom, kwargs): text += ', '.join(inner_columns) if froms: @@ -1599,25 +1634,7 @@ class SQLCompiler(Compiled): if select._for_update_arg is not None: text += self.for_update_clause(select, **kwargs) - if select._statement_hints: - per_dialect = [ - ht for (dialect_name, ht) - in select._statement_hints - if dialect_name in ('*', self.dialect.name) - ] - if per_dialect: - text += " " + self.get_statement_hint_text(per_dialect) - - if self.ctes and \ - compound_index == 0 and toplevel: - text = self._render_cte_clause() + text - - self.stack.pop(-1) - - if asfrom and parens: - return "(" + text + ")" - else: - return text + return text def _generate_prefixes(self, stmt, prefixes, **kw): clause = " ".join( -- cgit v1.2.1 From b6496ba3d28d685547eac2891bfea5f4ae60e834 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Sep 2014 17:33:53 -0400 Subject: - A revisit to this issue first patched in 0.9.5, apparently psycopg2's ``.closed`` accessor is not as reliable as we assumed, so we have added an 
explicit check for the exception messages "SSL SYSCALL error: Bad file descriptor" and "SSL SYSCALL error: EOF detected" when detecting an is-disconnect scenario. We will continue to consult psycopg2's connection.closed as a first check. fixes #3021 --- doc/build/changelog/changelog_09.rst | 13 +++++++++++++ lib/sqlalchemy/dialects/postgresql/psycopg2.py | 14 +++++++++----- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 7c75996a4..e5d6703e3 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,19 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, postgresql + :versions: 1.0.0 + :tickets: 3021 + + A revisit to this issue first patched in 0.9.5, apparently + psycopg2's ``.closed`` accessor is not as reliable as we assumed, + so we have added an explicit check for the exception messages + "SSL SYSCALL error: Bad file descriptor" and + "SSL SYSCALL error: EOF detected" when detecting an + is-disconnect scenario. We will continue to consult psycopg2's + connection.closed as a first check. + .. change:: :tags: bug, orm, engine :versions: 1.0.0 diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index e6450c97f..9dfd53e22 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -512,12 +512,14 @@ class PGDialect_psycopg2(PGDialect): def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.Error): # check the "closed" flag. this might not be - # present on old psycopg2 versions + # present on old psycopg2 versions. Also, + # this flag doesn't actually help in a lot of disconnect + # situations, so don't rely on it. if getattr(connection, 'closed', False): return True - # legacy checks based on strings. the "closed" check - # above most likely obviates the need for any of these. 
+ # checks based on strings. in the case that .closed + # didn't cut it, fall back onto these. str_e = str(e).partition("\n")[0] for msg in [ # these error messages from libpq: interfaces/libpq/fe-misc.c @@ -534,8 +536,10 @@ class PGDialect_psycopg2(PGDialect): # not sure where this path is originally from, it may # be obsolete. It really says "losed", not "closed". 'losed the connection unexpectedly', - # this can occur in newer SSL - 'connection has been closed unexpectedly' + # these can occur in newer SSL + 'connection has been closed unexpectedly', + 'SSL SYSCALL error: Bad file descriptor', + 'SSL SYSCALL error: EOF detected', ]: idx = str_e.find(msg) if idx >= 0 and '"' not in str_e[:idx]: -- cgit v1.2.1 From 4da020dae324cb871074e302f4840e8731988be0 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 29 Sep 2014 18:09:25 -0400 Subject: - rework tests for attached databases into individual tests, test both memory and file-based - When selecting from a UNION using an attached database file, the pysqlite driver reports column names in cursor.description as 'dbname.tablename.colname', instead of 'tablename.colname' as it normally does for a UNION (note that it's supposed to just be 'colname' for both, but we work around it). The column translation logic here has been adjusted to retrieve the rightmost token, rather than the second token, so it works in both cases. Workaround courtesy Tony Roberts. fixes #3211 --- doc/build/changelog/changelog_09.rst | 14 ++++ lib/sqlalchemy/dialects/sqlite/base.py | 8 +- test/dialect/test_sqlite.py | 138 ++++++++++++++++++++++++--------- 3 files changed, 121 insertions(+), 39 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index e5d6703e3..e3d9175cb 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,20 @@ .. changelog:: :version: 0.9.8 + .. 
change:: + :tags: bug, sqlite + :versions: 1.0.0 + :tickets: 3211 + + When selecting from a UNION using an attached database file, + the pysqlite driver reports column names in cursor.description + as 'dbname.tablename.colname', instead of 'tablename.colname' as + it normally does for a UNION (note that it's supposed to just be + 'colname' for both, but we work around it). The column translation + logic here has been adjusted to retrieve the rightmost token, rather + than the second token, so it works in both cases. Workaround + courtesy Tony Roberts. + .. change:: :tags: bug, postgresql :versions: 1.0.0 diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index 817834b7d..335b35c94 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -713,10 +713,12 @@ class SQLiteExecutionContext(default.DefaultExecutionContext): return self.execution_options.get("sqlite_raw_colnames", False) def _translate_colname(self, colname): - # adjust for dotted column names. SQLite in the case of UNION may - # store col names as "tablename.colname" in cursor.description + # adjust for dotted column names. SQLite + # in the case of UNION may store col names as + # "tablename.colname", or if using an attached database, + # "database.tablename.colname", in cursor.description if not self._preserve_raw_colnames and "." 
in colname: - return colname.split(".")[1], colname + return colname.split(".")[-1], colname else: return colname, None diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py index ae7212245..124208dbe 100644 --- a/test/dialect/test_sqlite.py +++ b/test/dialect/test_sqlite.py @@ -11,7 +11,7 @@ from sqlalchemy import Table, select, bindparam, Column,\ UniqueConstraint from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time from sqlalchemy import types as sqltypes -from sqlalchemy import event +from sqlalchemy import event, inspect from sqlalchemy.util import u, ue from sqlalchemy import exc, sql, schema, pool, util from sqlalchemy.dialects.sqlite import base as sqlite, \ @@ -480,40 +480,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): assert u('méil') in result.keys() assert ue('\u6e2c\u8a66') in result.keys() - def test_attached_as_schema(self): - cx = testing.db.connect() - try: - cx.execute('ATTACH DATABASE ":memory:" AS test_schema') - dialect = cx.dialect - assert dialect.get_table_names(cx, 'test_schema') == [] - meta = MetaData(cx) - Table('created', meta, Column('id', Integer), - schema='test_schema') - alt_master = Table('sqlite_master', meta, autoload=True, - schema='test_schema') - meta.create_all(cx) - eq_(dialect.get_table_names(cx, 'test_schema'), ['created']) - assert len(alt_master.c) > 0 - meta.clear() - reflected = Table('created', meta, autoload=True, - schema='test_schema') - assert len(reflected.c) == 1 - cx.execute(reflected.insert(), dict(id=1)) - r = cx.execute(reflected.select()).fetchall() - assert list(r) == [(1, )] - cx.execute(reflected.update(), dict(id=2)) - r = cx.execute(reflected.select()).fetchall() - assert list(r) == [(2, )] - cx.execute(reflected.delete(reflected.c.id == 2)) - r = cx.execute(reflected.select()).fetchall() - assert list(r) == [] - - # note that sqlite_master is cleared, above - - meta.drop_all() - assert dialect.get_table_names(cx, 'test_schema') == [] - 
finally: - cx.execute('DETACH DATABASE test_schema') def test_file_path_is_absolute(self): d = pysqlite_dialect.dialect() @@ -532,7 +498,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): e = create_engine('sqlite+pysqlite:///foo.db') assert e.pool.__class__ is pool.NullPool - def test_dont_reflect_autoindex(self): meta = MetaData(testing.db) t = Table('foo', meta, Column('bar', String, primary_key=True)) @@ -577,6 +542,107 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults): meta.drop_all() +class AttachedMemoryDBTest(fixtures.TestBase): + __only_on__ = 'sqlite' + + dbname = None + + def setUp(self): + self.conn = conn = testing.db.connect() + if self.dbname is None: + dbname = ':memory:' + else: + dbname = self.dbname + conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname) + self.metadata = MetaData() + + def tearDown(self): + self.metadata.drop_all(self.conn) + self.conn.execute('DETACH DATABASE test_schema') + if self.dbname: + os.remove(self.dbname) + + def _fixture(self): + meta = self.metadata + ct = Table( + 'created', meta, + Column('id', Integer), + Column('name', String), + schema='test_schema') + + meta.create_all(self.conn) + return ct + + def test_no_tables(self): + insp = inspect(self.conn) + eq_(insp.get_table_names("test_schema"), []) + + def test_table_names_present(self): + self._fixture() + insp = inspect(self.conn) + eq_(insp.get_table_names("test_schema"), ["created"]) + + def test_table_names_system(self): + self._fixture() + insp = inspect(self.conn) + eq_(insp.get_table_names("test_schema"), ["created"]) + + def test_reflect_system_table(self): + meta = MetaData(self.conn) + alt_master = Table( + 'sqlite_master', meta, autoload=True, + autoload_with=self.conn, + schema='test_schema') + assert len(alt_master.c) > 0 + + def test_reflect_user_table(self): + self._fixture() + + m2 = MetaData() + c2 = Table('created', m2, autoload=True, autoload_with=self.conn) + eq_(len(c2.c), 2) + + def test_crud(self): + 
ct = self._fixture() + + self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'}) + eq_( + self.conn.execute(ct.select()).fetchall(), + [(1, 'foo')] + ) + + self.conn.execute(ct.update(), {'id': 2, 'name': 'bar'}) + eq_( + self.conn.execute(ct.select()).fetchall(), + [(2, 'bar')] + ) + self.conn.execute(ct.delete()) + eq_( + self.conn.execute(ct.select()).fetchall(), + [] + ) + + def test_col_targeting(self): + ct = self._fixture() + + self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'}) + row = self.conn.execute(ct.select()).first() + eq_(row['id'], 1) + eq_(row['name'], 'foo') + + def test_col_targeting_union(self): + ct = self._fixture() + + self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'}) + row = self.conn.execute(ct.select().union(ct.select())).first() + eq_(row['id'], 1) + eq_(row['name'], 'foo') + + +class AttachedFileDBTest(AttachedMemoryDBTest): + dbname = 'attached_db.db' + + class SQLTest(fixtures.TestBase, AssertsCompiledSQL): """Tests SQLite-dialect specific compilation.""" -- cgit v1.2.1 From 690532131d8ce8250c62f1d3e27405902df03e70 Mon Sep 17 00:00:00 2001 From: ndparker Date: Thu, 2 Oct 2014 22:00:31 +0200 Subject: cleanup exception handling - use new exception hierarchy (since python 2.5) --- lib/sqlalchemy/dialects/mssql/base.py | 4 +--- lib/sqlalchemy/dialects/mysql/base.py | 8 ++------ lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 4 +--- lib/sqlalchemy/engine/base.py | 4 ---- lib/sqlalchemy/exc.py | 6 +++--- lib/sqlalchemy/orm/mapper.py | 4 +--- lib/sqlalchemy/pool.py | 12 ++++-------- lib/sqlalchemy/sql/elements.py | 4 +--- lib/sqlalchemy/testing/engines.py | 4 ---- lib/sqlalchemy/testing/provision.py | 20 +++++--------------- lib/sqlalchemy/util/langhelpers.py | 12 +++--------- 11 files changed, 21 insertions(+), 61 deletions(-) diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index ade2d00cb..dad02ee0f 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ 
b/lib/sqlalchemy/dialects/mssql/base.py @@ -846,9 +846,7 @@ class MSExecutionContext(default.DefaultExecutionContext): "SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer. format_table( self.compiled.statement.table))) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass def get_result_proxy(self): diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 0994e2416..277644022 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -2317,9 +2317,7 @@ class MySQLDialect(default.DefaultDialect): # basic operations via autocommit fail. try: dbapi_connection.commit() - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: @@ -2331,9 +2329,7 @@ class MySQLDialect(default.DefaultDialect): try: dbapi_connection.rollback() - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: if self.server_version_info < (3, 23, 15): args = sys.exc_info()[1].args if args and args[0] == 1064: diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index afa61d85b..6077ce53e 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -103,9 +103,7 @@ class MySQLDialect_mysqlconnector(MySQLDialect): 'client_flags', ClientFlag.get_default()) client_flags |= ClientFlag.FOUND_ROWS opts['client_flags'] = client_flags - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass return [[], opts] diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index b3460c240..220679c12 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1055,8 +1055,6 @@ class Connection(Connectable): """ try: cursor.close() - except (SystemExit, KeyboardInterrupt): - 
raise except Exception: # log the error through the connection pool's logger. self.engine.pool.logger.error( @@ -1135,8 +1133,6 @@ class Connection(Connectable): per_fn = fn(ctx) if per_fn is not None: ctx.chained_exception = newraise = per_fn - except (SystemExit, KeyboardInterrupt): - raise except Exception as _raised: # handler raises an exception - stop processing newraise = _raised diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index a82bae33f..5d35dc2e7 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -280,7 +280,9 @@ class DBAPIError(StatementError): connection_invalidated=False): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. - if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)): + if (isinstance(orig, BaseException) and + not isinstance(orig, Exception)) or \ + isinstance(orig, DontWrapMixin): return orig if orig is not None: @@ -310,8 +312,6 @@ class DBAPIError(StatementError): def __init__(self, statement, params, orig, connection_invalidated=False): try: text = str(orig) - except (KeyboardInterrupt, SystemExit): - raise except Exception as e: text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index bd28975dd..e4c0de188 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -2649,9 +2649,7 @@ def configure_mappers(): mapper._expire_memoizations() mapper.dispatch.mapper_configured( mapper, mapper.class_) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: exc = sys.exc_info()[1] if not hasattr(exc, '_configure_failed'): mapper._configure_failed = exc diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py index 0c162e984..a174df784 100644 --- a/lib/sqlalchemy/pool.py +++ b/lib/sqlalchemy/pool.py @@ -248,9 +248,7 @@ class Pool(log.Identified): self.logger.debug("Closing connection %r", connection) try: 
self._dialect.do_close(connection) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: self.logger.error("Exception closing connection %r", connection, exc_info=True) @@ -569,12 +567,12 @@ def _finalize_fairy(connection, connection_record, # Immediately close detached instances if not connection_record: pool._close_connection(connection) - except Exception as e: + except BaseException as e: pool.logger.error( "Exception during reset or similar", exc_info=True) if connection_record: connection_record.invalidate(e=e) - if isinstance(e, (SystemExit, KeyboardInterrupt)): + if not isinstance(e, Exception): raise if connection_record: @@ -842,9 +840,7 @@ class SingletonThreadPool(Pool): for conn in self._all_conns: try: conn.close() - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: # pysqlite won't even let you close a conn from a thread # that didn't create it pass diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 8e18a22fe..4bc1683dd 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -3491,9 +3491,7 @@ def _string_or_unprintable(element): else: try: return str(element) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: return "unprintable element %r" % element diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 67c13231e..1284f9c2a 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -37,8 +37,6 @@ class ConnectionKiller(object): def _safe(self, fn): try: fn() - except (SystemExit, KeyboardInterrupt): - raise except Exception as e: warnings.warn( "testing_reaper couldn't " @@ -168,8 +166,6 @@ class ReconnectFixture(object): def _safe(self, fn): try: fn() - except (SystemExit, KeyboardInterrupt): - raise except Exception as e: warnings.warn( "ReconnectFixture couldn't " diff --git a/lib/sqlalchemy/testing/provision.py 
b/lib/sqlalchemy/testing/provision.py index 64688d6b5..c8f7fdf30 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -120,9 +120,7 @@ def _pg_create_db(cfg, eng, ident): isolation_level="AUTOCOMMIT") as conn: try: _pg_drop_db(cfg, conn, ident) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass currentdb = conn.scalar("select current_database()") conn.execute("CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb)) @@ -133,9 +131,7 @@ def _mysql_create_db(cfg, eng, ident): with eng.connect() as conn: try: _mysql_drop_db(cfg, conn, ident) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass conn.execute("CREATE DATABASE %s" % ident) conn.execute("CREATE DATABASE %s_test_schema" % ident) @@ -177,21 +173,15 @@ def _mysql_drop_db(cfg, eng, ident): with eng.connect() as conn: try: conn.execute("DROP DATABASE %s_test_schema" % ident) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass try: conn.execute("DROP DATABASE %s_test_schema_2" % ident) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass try: conn.execute("DROP DATABASE %s" % ident) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 75c6e7b46..ea8f30e9d 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -490,9 +490,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): val = getattr(obj, arg, missing) if val is not missing and val != defval: output.append('%s=%r' % (arg, val)) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass if additional_kw: @@ -501,9 +499,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()): val = getattr(obj, arg, missing) if val is not missing and val != defval: 
output.append('%s=%r' % (arg, val)) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: pass return "%s(%s)" % (obj.__class__.__name__, ", ".join(output)) @@ -1189,9 +1185,7 @@ def warn_exception(func, *args, **kwargs): """ try: return func(*args, **kwargs) - except (SystemExit, KeyboardInterrupt): - raise - except: + except Exception: warn("%s('%s') ignored" % sys.exc_info()[0:2]) -- cgit v1.2.1 From 7e0835409cf5ab7f7ac27dc1f619773d84bfe53d Mon Sep 17 00:00:00 2001 From: ndparker Date: Sat, 4 Oct 2014 00:51:01 +0200 Subject: add simple tests for new name argument for Table.tometadata() --- test/sql/test_metadata.py | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 304e2cc5f..0e845d555 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -678,6 +678,50 @@ class ToMetaDataTest(fixtures.TestBase, ComparesTables): eq_(str(table_c.join(table2_c).onclause), 'myschema.mytable.myid = myschema.othertable.myid') + def test_change_name_retain_metadata(self): + meta = MetaData() + + table = Table('mytable', meta, + Column('myid', Integer, primary_key=True), + Column('name', String(40), nullable=True), + Column('description', String(30), + CheckConstraint("description='hi'")), + UniqueConstraint('name'), + schema='myschema', + ) + + table2 = table.tometadata(table.metadata, name='newtable') + table3 = table.tometadata(table.metadata, schema='newschema', + name='newtable') + + assert table.metadata is table2.metadata + assert table.metadata is table3.metadata + eq_((table.name, table2.name, table3.name), + ('mytable', 'newtable', 'newtable')) + eq_((table.key, table2.key, table3.key), + ('myschema.mytable', 'myschema.newtable', 'newschema.newtable')) + + def test_change_name_change_metadata(self): + meta = MetaData() + meta2 = MetaData() + + table = Table('mytable', meta, + Column('myid', Integer, primary_key=True), + 
Column('name', String(40), nullable=True), + Column('description', String(30), + CheckConstraint("description='hi'")), + UniqueConstraint('name'), + schema='myschema', + ) + + table2 = table.tometadata(meta2, name='newtable') + + assert table.metadata is not table2.metadata + eq_((table.name, table2.name), + ('mytable', 'newtable')) + eq_((table.key, table2.key), + ('myschema.mytable', 'myschema.newtable')) + def _assert_fk(self, t2, schema, expected, referred_schema_fn=None): m2 = MetaData() existing_schema = t2.schema -- cgit v1.2.1 From 49e750a1d788710b89764c4dd9c0ddbf9b1f38ad Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 4 Oct 2014 12:18:20 -0400 Subject: - changelog, migration for pr github:134 --- doc/build/changelog/changelog_10.rst | 12 ++++ doc/build/changelog/migration_10.rst | 16 +++++ doc/build/core/sqlelement.rst | 5 ++ lib/sqlalchemy/sql/elements.py | 29 +++++++++ lib/sqlalchemy/sql/functions.py | 9 ++- test/sql/test_compiler.py | 91 ---------------------------- test/sql/test_functions.py | 112 ++++++++++++++++++++++++++++++++++- 7 files changed, 181 insertions(+), 93 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 4d5ab1f06..a746abeac 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,18 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, postgresql + :pullreq: github:134 + + Added support for the FILTER keyword as applied to aggregate + functions, supported by Postgresql 9.4. Pull request + courtesy Ilja Everilä. + + .. seealso:: + + :ref:`feature_gh134` + .. 
change:: :tags: bug, sql, engine :tickets: 3215 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 0e9dd8d7b..b0ac868ec 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -1026,6 +1026,22 @@ running 0.9 in production. :ticket:`2891` +.. _feature_gh134: + +Postgresql FILTER keyword +------------------------- + +The SQL standard FILTER keyword for aggregate functions is now supported +by Postgresql as of 9.4. SQLAlchemy allows this using +:meth:`.FunctionElement.filter`:: + + func.count(1).filter(True) + +.. seealso:: + + :meth:`.FunctionElement.filter` + + :class:`.FunctionFilter` MySQL internal "no such table" exceptions not passed to event handlers ---------------------------------------------------------------------- diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst index 61600e927..44a969dbb 100644 --- a/doc/build/core/sqlelement.rst +++ b/doc/build/core/sqlelement.rst @@ -35,6 +35,8 @@ used to construct any kind of typed SQL expression. .. autodata:: func +.. autofunction:: funcfilter + .. autofunction:: label .. autofunction:: literal @@ -109,6 +111,9 @@ used to construct any kind of typed SQL expression. .. autoclass:: sqlalchemy.sql.elements.False_ :members: +.. autoclass:: FunctionFilter + :members: + .. autoclass:: Label :members: diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 53838358d..db14031d2 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2895,6 +2895,17 @@ class FunctionFilter(ColumnElement): which controls which rows are passed to it. It's supported only by certain database backends. + Invocation of :class:`.FunctionFilter` is via + :meth:`.FunctionElement.filter`:: + + func.count(1).filter(True) + + .. versionadded:: 1.0.0 + + .. 
seealso:: + + :meth:`.FunctionElement.filter` + """ __visit_name__ = 'funcfilter' @@ -2916,11 +2927,29 @@ class FunctionFilter(ColumnElement): This function is also available from the :data:`~.expression.func` construct itself via the :meth:`.FunctionElement.filter` method. + .. versionadded:: 1.0.0 + + .. seealso:: + + :meth:`.FunctionElement.filter` + + """ self.func = func self.filter(*criterion) def filter(self, *criterion): + """Produce an additional FILTER against the function. + + This method adds additional criteria to the initial criteria + set up by :meth:`.FunctionElement.filter`. + + Multiple criteria are joined together at SQL render time + via ``AND``. + + + """ + for criterion in list(criterion): criterion = _expression_literal_as_text(criterion) diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index a07eca8c6..9280c7d60 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -131,7 +131,14 @@ class FunctionElement(Executable, ColumnElement, FromClause): from sqlalchemy import funcfilter funcfilter(func.count(1), True) - See :func:`~.expression.funcfilter` for a full description. + .. versionadded:: 1.0.0 + + .. 
seealso:: + + :class:`.FunctionFilter` + + :func:`.funcfilter` + """ if not criterion: diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index ed13e8455..3e6b87351 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -2190,97 +2190,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): "(ORDER BY mytable.myid + :myid_1) AS anon_1 FROM mytable" ) - def test_funcfilter(self): - self.assert_compile( - func.count(1).filter(), - "count(:param_1)" - ) - self.assert_compile( - func.count(1).filter( - table1.c.name != None - ), - "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)" - ) - self.assert_compile( - func.count(1).filter( - table1.c.name == None, - table1.c.myid > 0 - ), - "count(:param_1) FILTER (WHERE mytable.name IS NULL AND " - "mytable.myid > :myid_1)" - ) - - self.assert_compile( - select([func.count(1).filter( - table1.c.description != None - ).label('foo')]), - "SELECT count(:param_1) FILTER (WHERE mytable.description " - "IS NOT NULL) AS foo FROM mytable" - ) - - # test from_obj generation. 
- # from func: - self.assert_compile( - select([ - func.max(table1.c.name).filter( - literal_column('description') != None - ) - ]), - "SELECT max(mytable.name) FILTER (WHERE description " - "IS NOT NULL) AS anon_1 FROM mytable" - ) - # from criterion: - self.assert_compile( - select([ - func.count(1).filter( - table1.c.name == 'name' - ) - ]), - "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) " - "AS anon_1 FROM mytable" - ) - - # test chaining: - self.assert_compile( - select([ - func.count(1).filter( - table1.c.name == 'name' - ).filter( - table1.c.description == 'description' - ) - ]), - "SELECT count(:param_1) FILTER (WHERE " - "mytable.name = :name_1 AND mytable.description = :description_1) " - "AS anon_1 FROM mytable" - ) - - # test filtered windowing: - self.assert_compile( - select([ - func.rank().filter( - table1.c.name > 'foo' - ).over( - order_by=table1.c.name - ) - ]), - "SELECT rank() FILTER (WHERE mytable.name > :name_1) " - "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable" - ) - - self.assert_compile( - select([ - func.rank().filter( - table1.c.name > 'foo' - ).over( - order_by=table1.c.name, - partition_by=['description'] - ) - ]), - "SELECT rank() FILTER (WHERE mytable.name > :name_1) " - "OVER (PARTITION BY mytable.description ORDER BY mytable.name) " - "AS anon_1 FROM mytable" - ) - def test_date_between(self): import datetime table = Table('dt', metadata, diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index 9b7649e63..ec8d9b5c0 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -1,7 +1,8 @@ from sqlalchemy.testing import eq_ import datetime from sqlalchemy import func, select, Integer, literal, DateTime, Table, \ - Column, Sequence, MetaData, extract, Date, String, bindparam + Column, Sequence, MetaData, extract, Date, String, bindparam, \ + literal_column from sqlalchemy.sql import table, column from sqlalchemy import sql, util from sqlalchemy.sql.compiler import BIND_TEMPLATES 
@@ -15,6 +16,13 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle +table1 = table('mytable', + column('myid', Integer), + column('name', String), + column('description', String), + ) + + class CompileTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' @@ -367,6 +375,108 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): expr = func.rows("foo").alias('bar') assert len(expr.c) + def test_funcfilter_empty(self): + self.assert_compile( + func.count(1).filter(), + "count(:param_1)" + ) + + def test_funcfilter_criterion(self): + self.assert_compile( + func.count(1).filter( + table1.c.name != None + ), + "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)" + ) + + def test_funcfilter_compound_criterion(self): + self.assert_compile( + func.count(1).filter( + table1.c.name == None, + table1.c.myid > 0 + ), + "count(:param_1) FILTER (WHERE mytable.name IS NULL AND " + "mytable.myid > :myid_1)" + ) + + def test_funcfilter_label(self): + self.assert_compile( + select([func.count(1).filter( + table1.c.description != None + ).label('foo')]), + "SELECT count(:param_1) FILTER (WHERE mytable.description " + "IS NOT NULL) AS foo FROM mytable" + ) + + def test_funcfilter_fromobj_fromfunc(self): + # test from_obj generation. 
+ # from func: + self.assert_compile( + select([ + func.max(table1.c.name).filter( + literal_column('description') != None + ) + ]), + "SELECT max(mytable.name) FILTER (WHERE description " + "IS NOT NULL) AS anon_1 FROM mytable" + ) + + def test_funcfilter_fromobj_fromcriterion(self): + # from criterion: + self.assert_compile( + select([ + func.count(1).filter( + table1.c.name == 'name' + ) + ]), + "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) " + "AS anon_1 FROM mytable" + ) + + def test_funcfilter_chaining(self): + # test chaining: + self.assert_compile( + select([ + func.count(1).filter( + table1.c.name == 'name' + ).filter( + table1.c.description == 'description' + ) + ]), + "SELECT count(:param_1) FILTER (WHERE " + "mytable.name = :name_1 AND mytable.description = :description_1) " + "AS anon_1 FROM mytable" + ) + + def test_funcfilter_windowing_orderby(self): + # test filtered windowing: + self.assert_compile( + select([ + func.rank().filter( + table1.c.name > 'foo' + ).over( + order_by=table1.c.name + ) + ]), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable" + ) + + def test_funcfilter_windowing_orderby_partitionby(self): + self.assert_compile( + select([ + func.rank().filter( + table1.c.name > 'foo' + ).over( + order_by=table1.c.name, + partition_by=['description'] + ) + ]), + "SELECT rank() FILTER (WHERE mytable.name > :name_1) " + "OVER (PARTITION BY mytable.description ORDER BY mytable.name) " + "AS anon_1 FROM mytable" + ) + class ExecuteTest(fixtures.TestBase): -- cgit v1.2.1 From b510b990947cb8e461df8877ca0f011542b7a319 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 4 Oct 2014 18:57:01 -0400 Subject: - use provide_metadata for new unique constraint / index tests - add a test for PG reflection of unique index without any unique constraint - for PG, don't include 'duplicates_constraint' in the entry if the index does not actually mirror a constraint - use a distinct 
method for unique constraint reflection within table - catch unique constraint not implemented condition; this may be within some dialects and also is expected to be supported by Alembic tests - migration + changelogs for #3184 - add individual doc notes as well to MySQL, Postgreql fixes #3184 --- doc/build/changelog/changelog_10.rst | 32 +++++++++++++ doc/build/changelog/migration_10.rst | 57 +++++++++++++++++++++++ lib/sqlalchemy/dialects/mysql/base.py | 23 ++++++++++ lib/sqlalchemy/dialects/postgresql/base.py | 52 ++++++++++++++++----- lib/sqlalchemy/engine/reflection.py | 15 +++++- test/dialect/mysql/test_reflection.py | 39 ++++++++-------- test/dialect/postgresql/test_reflection.py | 74 ++++++++++++++++++++---------- 7 files changed, 237 insertions(+), 55 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index a746abeac..69b5b29c1 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,38 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, sql + :tickets: 3184 + :pullreq: bitbucket:30 + + The :class:`.UniqueConstraint` construct is now included when + reflecting a :class:`.Table` object, for databases where this + is applicable. In order to achieve this + with sufficient accuracy, MySQL and Postgresql now contain features + that correct for the duplication of indexes and unique constraints + when reflecting tables, indexes, and constraints. + In the case of MySQL, there is not actually a "unique constraint" + concept independent of a "unique index", so for this backend + :class:`.UniqueConstraint` continues to remain non-present for a + reflected :class:`.Table`. 
For Postgresql, the query used to + detect indexes against ``pg_index`` has been improved to check for + the same construct in ``pg_constraint``, and the implicitly + constructed unique index is not included with a + reflected :class:`.Table`. + + In both cases, the :meth:`.Inspector.get_indexes` and the + :meth:`.Inspector.get_unique_constraints` methods return both + constructs individually, but include a new token + ``duplicates_constraint`` in the case of Postgresql or + ``duplicates_index`` in the case + of MySQL to indicate when this condition is detected. + Pull request courtesy Johannes Erdfelt. + + .. seealso:: + + :ref:`feature_3184` + .. change:: :tags: feature, postgresql :pullreq: github:134 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index b0ac868ec..439ec4c67 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -50,6 +50,62 @@ wishes to support the new feature should now call upon the ``._limit_clause`` and ``._offset_clause`` attributes to receive the full SQL expression, rather than the integer value. +.. _feature_3184: + +UniqueConstraint is now part of the Table reflection process +------------------------------------------------------------ + +A :class:`.Table` object populated using ``autoload=True`` will now +include :class:`.UniqueConstraint` constructs as well as +:class:`.Index` constructs. This logic has a few caveats for +Postgresql and Mysql: + +Postgresql +^^^^^^^^^^ + +Postgresql has the behavior such that when a UNIQUE constraint is +created, it implicitly creates a UNIQUE INDEX corresponding to that +constraint as well. The :meth:`.Inspector.get_indexes` and the +:meth:`.Inspector.get_unique_constraints` methods will continue to +**both** return these entries distinctly, where +:meth:`.Inspector.get_indexes` now features a token +``duplicates_constraint`` within the index entry indicating the +corresponding constraint when detected. 
However, when performing +full table reflection using ``Table(..., autoload=True)``, the +:class:`.Index` construct is detected as being linked to the +:class:`.UniqueConstraint`, and is **not** present within the +:attr:`.Table.indexes` collection; only the :class:`.UniqueConstraint` +will be present in the :attr:`.Table.constraints` collection. This +deduplication logic works by joining to the ``pg_constraint`` table +when querying ``pg_index`` to see if the two constructs are linked. + +MySQL +^^^^^ + +MySQL does not have separate concepts for a UNIQUE INDEX and a UNIQUE +constraint. While it supports both syntaxes when creating tables and indexes, +it does not store them any differently. The +:meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` methods will continue to +**both** return an entry for a UNIQUE index in MySQL, +where :meth:`.Inspector.get_unique_constraints` features a new token +``duplicates_index`` within the constraint entry indicating that this is a +dupe entry corresponding to that index. However, when performing +full table reflection using ``Table(..., autoload=True)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`.Table.indexes` +collection. + +.. seealso:: + + :ref:`postgresql_index_reflection` + + :ref:`mysql_unique_constraints` + +:ticket:`3184` + Behavioral Improvements ======================= @@ -1043,6 +1099,7 @@ by Postgresql as of 9.4. 
SQLAlchemy allows this using :class:`.FunctionFilter` + MySQL internal "no such table" exceptions not passed to event handlers ---------------------------------------------------------------------- diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 2f85a3626..793e6566d 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -341,6 +341,29 @@ reflection will not include foreign keys. For these tables, you may supply a :ref:`mysql_storage_engines` +.. _mysql_unique_constraints: + +MySQL Unique Constraints and Reflection +--------------------------------------- + +SQLAlchemy supports both the :class:`.Index` construct with the +flag ``unique=True``, indicating a UNIQUE index, as well as the +:class:`.UniqueConstraint` construct, representing a UNIQUE constraint. +Both objects/syntaxes are supported by MySQL when emitting DDL to create +these constraints. However, MySQL does not have a unique constraint +construct that is separate from a unique index; that is, the "UNIQUE" +constraint on MySQL is equivalent to creating a "UNIQUE INDEX". + +When reflecting these constructs, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` methods will **both** +return an entry for a UNIQUE index in MySQL. However, when performing +full table reflection using ``Table(..., autoload=True)``, +the :class:`.UniqueConstraint` construct is +**not** part of the fully reflected :class:`.Table` construct under any +circumstances; this construct is always represented by a :class:`.Index` +with the ``unique=True`` setting present in the :attr:`.Table.indexes` +collection. + .. 
_mysql_timestamp_null: diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index 556493b3c..baa640eaa 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -402,6 +402,28 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your version of PostgreSQL. +.. _postgresql_index_reflection: + +Postgresql Index Reflection +--------------------------- + +The Postgresql database creates a UNIQUE INDEX implicitly whenever the +UNIQUE CONSTRAINT construct is used. When inspecting a table using +:class:`.Inspector`, the :meth:`.Inspector.get_indexes` +and the :meth:`.Inspector.get_unique_constraints` will report on these +two constructs distinctly; in the case of the index, the key +``duplicates_constraint`` will be present in the index entry if it is +detected as mirroring a constraint. When performing reflection using +``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned +in :attr:`.Table.indexes` when it is detected as mirroring a +:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection. + +.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes + :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints` + collection; the Postgresql backend will no longer include a "mirrored" + :class:`.Index` construct in :attr:`.Table.indexes` if it is detected + as corresponding to a unique constraint. 
+ Special Reflection Options -------------------------- @@ -2523,21 +2545,27 @@ class PGDialect(default.DefaultDialect): % idx_name) sv_idx_name = idx_name + has_idx = idx_name in indexes index = indexes[idx_name] if col is not None: index['cols'][col_num] = col - index['key'] = [int(k.strip()) for k in idx_key.split()] - index['unique'] = unique - index['duplicates_constraint'] = (None if conrelid is None - else idx_name) - - return [ - {'name': name, - 'unique': idx['unique'], - 'column_names': [idx['cols'][i] for i in idx['key']], - 'duplicates_constraint': idx['duplicates_constraint']} - for name, idx in indexes.items() - ] + if not has_idx: + index['key'] = [int(k.strip()) for k in idx_key.split()] + index['unique'] = unique + if conrelid is not None: + index['duplicates_constraint'] = idx_name + + result = [] + for name, idx in indexes.items(): + entry = { + 'name': name, + 'unique': idx['unique'], + 'column_names': [idx['cols'][i] for i in idx['key']] + } + if 'duplicates_constraint' in idx: + entry['duplicates_constraint'] = idx['duplicates_constraint'] + result.append(entry) + return result @reflection.cache def get_unique_constraints(self, connection, table_name, diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 15c2dd195..2a1def86a 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -508,6 +508,10 @@ class Inspector(object): table_name, schema, table, cols_by_orig_name, include_columns, exclude_columns, reflection_options) + self._reflect_unique_constraints( + table_name, schema, table, cols_by_orig_name, + include_columns, exclude_columns, reflection_options) + def _reflect_column( self, table, col_d, include_columns, exclude_columns, cols_by_orig_name): @@ -665,8 +669,17 @@ class Inspector(object): sa_schema.Index(name, *idx_cols, **dict(unique=unique)) + def _reflect_unique_constraints( + self, table_name, schema, table, cols_by_orig_name, + include_columns, 
exclude_columns, reflection_options): + # Unique Constraints - constraints = self.get_unique_constraints(table_name, schema) + try: + constraints = self.get_unique_constraints(table_name, schema) + except NotImplementedError: + # optional dialect feature + return + for const_d in constraints: conname = const_d['name'] columns = const_d['column_names'] diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py index b8f72b942..99733e397 100644 --- a/test/dialect/mysql/test_reflection.py +++ b/test/dialect/mysql/test_reflection.py @@ -283,36 +283,37 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults): view_names = dialect.get_view_names(connection, "information_schema") self.assert_('TABLES' in view_names) + @testing.provide_metadata def test_reflection_with_unique_constraint(self): insp = inspect(testing.db) - uc_table = Table('mysql_uc', MetaData(testing.db), + meta = self.metadata + uc_table = Table('mysql_uc', meta, Column('a', String(10)), UniqueConstraint('a', name='uc_a')) - try: - uc_table.create() + uc_table.create() - # MySQL converts unique constraints into unique indexes and - # the 0.9 API returns it as both an index and a constraint - indexes = set(i['name'] for i in insp.get_indexes('mysql_uc')) - constraints = set(i['name'] - for i in insp.get_unique_constraints('mysql_uc')) + # MySQL converts unique constraints into unique indexes. 
+ # separately we get both + indexes = dict((i['name'], i) for i in insp.get_indexes('mysql_uc')) + constraints = set(i['name'] + for i in insp.get_unique_constraints('mysql_uc')) - self.assert_('uc_a' in indexes) - self.assert_('uc_a' in constraints) + self.assert_('uc_a' in indexes) + self.assert_(indexes['uc_a']['unique']) + self.assert_('uc_a' in constraints) - # However, upon creating a Table object via reflection, it should - # only appear as a unique index and not a constraint - reflected = Table('mysql_uc', MetaData(testing.db), autoload=True) + # reflection here favors the unique index, as that's the + # more "official" MySQL construct + reflected = Table('mysql_uc', MetaData(testing.db), autoload=True) - indexes = set(i.name for i in reflected.indexes) - constraints = set(uc.name for uc in reflected.constraints) + indexes = dict((i.name, i) for i in reflected.indexes) + constraints = set(uc.name for uc in reflected.constraints) - self.assert_('uc_a' in indexes) - self.assert_('uc_a' not in constraints) - finally: - uc_table.drop() + self.assert_('uc_a' in indexes) + self.assert_(indexes['uc_a'].unique) + self.assert_('uc_a' not in constraints) class RawReflectionTest(fixtures.TestBase): diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index fc013c72a..8de71216e 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -7,7 +7,8 @@ from sqlalchemy.testing import fixtures from sqlalchemy import testing from sqlalchemy import inspect from sqlalchemy import Table, Column, MetaData, Integer, String, \ - PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint + PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint, \ + Index from sqlalchemy import exc import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql @@ -656,8 +657,7 @@ class ReflectionTest(fixtures.TestBase): conn.execute("ALTER TABLE t RENAME COLUMN 
x to y") ind = testing.db.dialect.get_indexes(conn, "t", None) - eq_(ind, [{'unique': False, 'duplicates_constraint': None, - 'column_names': ['y'], 'name': 'idx1'}]) + eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}]) conn.close() @testing.provide_metadata @@ -804,37 +804,65 @@ class ReflectionTest(fixtures.TestBase): 'labels': ['sad', 'ok', 'happy'] }]) + @testing.provide_metadata def test_reflection_with_unique_constraint(self): insp = inspect(testing.db) - uc_table = Table('pgsql_uc', MetaData(testing.db), + meta = self.metadata + uc_table = Table('pgsql_uc', meta, Column('a', String(10)), UniqueConstraint('a', name='uc_a')) - try: - uc_table.create() + uc_table.create() - # PostgreSQL will create an implicit index for a unique - # constraint. As a result, the 0.9 API returns it as both - # an index and a constraint - indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc')) - constraints = set(i['name'] - for i in insp.get_unique_constraints('pgsql_uc')) + # PostgreSQL will create an implicit index for a unique + # constraint. 
Separately we get both + indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc')) + constraints = set(i['name'] + for i in insp.get_unique_constraints('pgsql_uc')) - self.assert_('uc_a' in indexes) - self.assert_('uc_a' in constraints) + self.assert_('uc_a' in indexes) + self.assert_('uc_a' in constraints) - # However, upon creating a Table object via reflection, it should - # only appear as a unique constraint and not an index - reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True) + # reflection corrects for the dupe + reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True) - indexes = set(i.name for i in reflected.indexes) - constraints = set(uc.name for uc in reflected.constraints) + indexes = set(i.name for i in reflected.indexes) + constraints = set(uc.name for uc in reflected.constraints) - self.assert_('uc_a' not in indexes) - self.assert_('uc_a' in constraints) - finally: - uc_table.drop() + self.assert_('uc_a' not in indexes) + self.assert_('uc_a' in constraints) + + @testing.provide_metadata + def test_reflect_unique_index(self): + insp = inspect(testing.db) + + meta = self.metadata + + # a unique index OTOH we are able to detect is an index + # and not a unique constraint + uc_table = Table('pgsql_uc', meta, + Column('a', String(10)), + Index('ix_a', 'a', unique=True)) + + uc_table.create() + + indexes = dict((i['name'], i) for i in insp.get_indexes('pgsql_uc')) + constraints = set(i['name'] + for i in insp.get_unique_constraints('pgsql_uc')) + + self.assert_('ix_a' in indexes) + assert indexes['ix_a']['unique'] + self.assert_('ix_a' not in constraints) + + reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True) + + indexes = dict((i.name, i) for i in reflected.indexes) + constraints = set(uc.name for uc in reflected.constraints) + + self.assert_('ix_a' in indexes) + assert indexes['ix_a'].unique + self.assert_('ix_a' not in constraints) class CustomTypeReflectionTest(fixtures.TestBase): -- cgit v1.2.1 From 
c4dee4cbb7aa1a5486a90d3c48ffd8500d3a6dd2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 4 Oct 2014 19:10:23 -0400 Subject: - move some things that are features to the features --- doc/build/changelog/migration_10.rst | 426 +++++++++++++++++------------------ 1 file changed, 213 insertions(+), 213 deletions(-) diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 439ec4c67..b3e5f6f15 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -25,6 +25,141 @@ potentially backwards-incompatible changes. New Features ============ +.. _feature_3150: + +Improvements to declarative mixins, ``@declared_attr`` and related features +---------------------------------------------------------------------------- + +The declarative system in conjunction with :class:`.declared_attr` has been +overhauled to support new capabilities. + +A function decorated with :class:`.declared_attr` is now called only **after** +any mixin-based column copies are generated. This means the function can +call upon mixin-established columns and will receive a reference to the correct +:class:`.Column` object:: + + class HasFooBar(object): + foobar = Column(Integer) + + @declared_attr + def foobar_prop(cls): + return column_property('foobar: ' + cls.foobar) + + class SomeClass(HasFooBar, Base): + __tablename__ = 'some_table' + id = Column(Integer, primary_key=True) + +Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``, +and ``SomeClass.foobar`` will be the final :class:`.Column` object that is +to be mapped to ``SomeClass``, as opposed to the non-copied object present +directly on ``HasFooBar``, even though the columns aren't mapped yet. + +The :class:`.declared_attr` function now **memoizes** the value +that's returned on a per-class basis, so that repeated calls to the same +attribute will return the same value. 
We can alter the example to illustrate +this:: + + class HasFooBar(object): + @declared_attr + def foobar(cls): + return Column(Integer) + + @declared_attr + def foobar_prop(cls): + return column_property('foobar: ' + cls.foobar) + + class SomeClass(HasFooBar, Base): + __tablename__ = 'some_table' + id = Column(Integer, primary_key=True) + +Previously, ``SomeClass`` would be mapped with one particular copy of +the ``foobar`` column, but the ``foobar_prop`` by calling upon ``foobar`` +a second time would produce a different column. The value of +``SomeClass.foobar`` is now memoized during declarative setup time, so that +even before the attribute is mapped by the mapper, the interim column +value will remain consistent no matter how many times the +:class:`.declared_attr` is called upon. + +The two behaviors above should help considerably with declarative definition +of many types of mapper properties that derive from other attributes, where +the :class:`.declared_attr` function is called upon from other +:class:`.declared_attr` functions locally present before the class is +actually mapped. + +For a pretty slim edge case where one wishes to build a declarative mixin +that establishes distinct columns per subclass, a new modifier +:attr:`.declared_attr.cascading` is added. With this modifier, the +decorated function will be invoked individually for each class in the +mapped inheritance hierarchy. While this is already the behavior for +special attributes such as ``__table_args__`` and ``__mapper_args__``, +for columns and other properties the behavior by default assumes that attribute +is affixed to the base class only, and just inherited from subclasses. 
+With :attr:`.declared_attr.cascading`, individual behaviors can be +applied:: + + class HasSomeAttribute(object): + @declared_attr.cascading + def some_id(cls): + if has_inherited_table(cls): + return Column(ForeignKey('myclass.id'), primary_key=True) + else: + return Column(Integer, primary_key=True) + + return Column('id', Integer, primary_key=True) + + class MyClass(HasSomeAttribute, Base): + "" + # ... + + class MySubClass(MyClass): + "" + # ... + +.. seealso:: + + :ref:`mixin_inheritance_columns` + +Finally, the :class:`.AbstractConcreteBase` class has been reworked +so that a relationship or other mapper property can be set up inline +on the abstract base:: + + from sqlalchemy import Column, Integer, ForeignKey + from sqlalchemy.orm import relationship + from sqlalchemy.ext.declarative import (declarative_base, declared_attr, + AbstractConcreteBase) + + Base = declarative_base() + + class Something(Base): + __tablename__ = u'something' + id = Column(Integer, primary_key=True) + + + class Abstract(AbstractConcreteBase, Base): + id = Column(Integer, primary_key=True) + + @declared_attr + def something_id(cls): + return Column(ForeignKey(Something.id)) + + @declared_attr + def something(cls): + return relationship(Something) + + + class Concrete(Abstract): + __tablename__ = u'cca' + __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True} + + +The above mapping will set up a table ``cca`` with both an ``id`` and +a ``something_id`` column, and ``Concrete`` will also have a relationship +``something``. The new feature is that ``Abstract`` will also have an +independently configured relationship ``something`` that builds against +the polymorphic union of the base. + +:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050` + .. 
_feature_3034: Select/Query LIMIT / OFFSET may be specified as an arbitrary SQL expression @@ -50,6 +185,84 @@ wishes to support the new feature should now call upon the ``._limit_clause`` and ``._offset_clause`` attributes to receive the full SQL expression, rather than the integer value. +.. _change_2051: + +New Postgresql Table options +----------------------------- + +Added support for PG table options TABLESPACE, ON COMMIT, +WITH(OUT) OIDS, and INHERITS, when rendering DDL via +the :class:`.Table` construct. + +.. seealso:: + + :ref:`postgresql_table_options` + +:ticket:`2051` + +.. _feature_get_enums: + +New get_enums() method with Postgresql Dialect +---------------------------------------------- + +The :func:`.inspect` method returns a :class:`.PGInspector` object in the +case of Postgresql, which includes a new :meth:`.PGInspector.get_enums` +method that returns information on all available ``ENUM`` types:: + + from sqlalchemy import inspect, create_engine + + engine = create_engine("postgresql+psycopg2://host/dbname") + insp = inspect(engine) + print(insp.get_enums()) + +.. seealso:: + + :meth:`.PGInspector.get_enums` + +.. _feature_2891: + +Postgresql Dialect reflects Materialized Views, Foreign Tables +-------------------------------------------------------------- + +Changes are as follows: + +* the :class:`Table` construct with ``autoload=True`` will now match a name + that exists in the database as a materialized view or foriegn table. + +* :meth:`.Inspector.get_view_names` will return plain and materialized view + names. + +* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it + continues to return only the names of plain tables. + +* A new method :meth:`.PGInspector.get_foreign_table_names` is added which + will return the names of tables that are specifically marked as "foreign" + in the Postgresql schema tables. 
+ +The change to reflection involves adding ``'m'`` and ``'f'`` to the list +of qualifiers we use when querying ``pg_class.relkind``, but this change +is new in 1.0.0 to avoid any backwards-incompatible surprises for those +running 0.9 in production. + +:ticket:`2891` + +.. _feature_gh134: + +Postgresql FILTER keyword +------------------------- + +The SQL standard FILTER keyword for aggregate functions is now supported +by Postgresql as of 9.4. SQLAlchemy allows this using +:meth:`.FunctionElement.filter`:: + + func.count(1).filter(True) + +.. seealso:: + + :meth:`.FunctionElement.filter` + + :class:`.FunctionFilter` + .. _feature_3184: UniqueConstraint is now part of the Table reflection process @@ -177,7 +390,6 @@ MacBookPro is 31 seconds on 0.9 and 26 seconds on 1.0, the extra time spent setting up very large memory buffers. - .. _feature_3176: New KeyedTuple implementation dramatically faster @@ -363,140 +575,6 @@ Renders:: :ticket:`3177` -.. _feature_3150: - -Improvements to declarative mixins, ``@declared_attr`` and related features ----------------------------------------------------------------------------- - -The declarative system in conjunction with :class:`.declared_attr` has been -overhauled to support new capabilities. - -A function decorated with :class:`.declared_attr` is now called only **after** -any mixin-based column copies are generated. 
This means the function can -call upon mixin-established columns and will receive a reference to the correct -:class:`.Column` object:: - - class HasFooBar(object): - foobar = Column(Integer) - - @declared_attr - def foobar_prop(cls): - return column_property('foobar: ' + cls.foobar) - - class SomeClass(HasFooBar, Base): - __tablename__ = 'some_table' - id = Column(Integer, primary_key=True) - -Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``, -and ``SomeClass.foobar`` will be the final :class:`.Column` object that is -to be mapped to ``SomeClass``, as opposed to the non-copied object present -directly on ``HasFooBar``, even though the columns aren't mapped yet. - -The :class:`.declared_attr` function now **memoizes** the value -that's returned on a per-class basis, so that repeated calls to the same -attribute will return the same value. We can alter the example to illustrate -this:: - - class HasFooBar(object): - @declared_attr - def foobar(cls): - return Column(Integer) - - @declared_attr - def foobar_prop(cls): - return column_property('foobar: ' + cls.foobar) - - class SomeClass(HasFooBar, Base): - __tablename__ = 'some_table' - id = Column(Integer, primary_key=True) - -Previously, ``SomeClass`` would be mapped with one particular copy of -the ``foobar`` column, but the ``foobar_prop`` by calling upon ``foobar`` -a second time would produce a different column. The value of -``SomeClass.foobar`` is now memoized during declarative setup time, so that -even before the attribute is mapped by the mapper, the interim column -value will remain consistent no matter how many times the -:class:`.declared_attr` is called upon. - -The two behaviors above should help considerably with declarative definition -of many types of mapper properties that derive from other attributes, where -the :class:`.declared_attr` function is called upon from other -:class:`.declared_attr` functions locally present before the class is -actually mapped. 
- -For a pretty slim edge case where one wishes to build a declarative mixin -that establishes distinct columns per subclass, a new modifier -:attr:`.declared_attr.cascading` is added. With this modifier, the -decorated function will be invoked individually for each class in the -mapped inheritance hierarchy. While this is already the behavior for -special attributes such as ``__table_args__`` and ``__mapper_args__``, -for columns and other properties the behavior by default assumes that attribute -is affixed to the base class only, and just inherited from subclasses. -With :attr:`.declared_attr.cascading`, individual behaviors can be -applied:: - - class HasSomeAttribute(object): - @declared_attr.cascading - def some_id(cls): - if has_inherited_table(cls): - return Column(ForeignKey('myclass.id'), primary_key=True) - else: - return Column(Integer, primary_key=True) - - return Column('id', Integer, primary_key=True) - - class MyClass(HasSomeAttribute, Base): - "" - # ... - - class MySubClass(MyClass): - "" - # ... - -.. 
seealso:: - - :ref:`mixin_inheritance_columns` - -Finally, the :class:`.AbstractConcreteBase` class has been reworked -so that a relationship or other mapper property can be set up inline -on the abstract base:: - - from sqlalchemy import Column, Integer, ForeignKey - from sqlalchemy.orm import relationship - from sqlalchemy.ext.declarative import (declarative_base, declared_attr, - AbstractConcreteBase) - - Base = declarative_base() - - class Something(Base): - __tablename__ = u'something' - id = Column(Integer, primary_key=True) - - - class Abstract(AbstractConcreteBase, Base): - id = Column(Integer, primary_key=True) - - @declared_attr - def something_id(cls): - return Column(ForeignKey(Something.id)) - - @declared_attr - def something(cls): - return relationship(Something) - - - class Concrete(Abstract): - __tablename__ = u'cca' - __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True} - - -The above mapping will set up a table ``cca`` with both an ``id`` and -a ``something_id`` column, and ``Concrete`` will also have a relationship -``something``. The new feature is that ``Abstract`` will also have an -independently configured relationship ``something`` that builds against -the polymorphic union of the base. - -:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050` .. _bug_3188: @@ -1021,84 +1099,6 @@ A :class:`.Table` can be set up for reflection by passing Dialect Changes =============== -.. _change_2051: - -New Postgresql Table options ------------------------------ - -Added support for PG table options TABLESPACE, ON COMMIT, -WITH(OUT) OIDS, and INHERITS, when rendering DDL via -the :class:`.Table` construct. - -.. seealso:: - - :ref:`postgresql_table_options` - -:ticket:`2051` - -.. 
_feature_get_enums: - -New get_enums() method with Postgresql Dialect ----------------------------------------------- - -The :func:`.inspect` method returns a :class:`.PGInspector` object in the -case of Postgresql, which includes a new :meth:`.PGInspector.get_enums` -method that returns information on all available ``ENUM`` types:: - - from sqlalchemy import inspect, create_engine - - engine = create_engine("postgresql+psycopg2://host/dbname") - insp = inspect(engine) - print(insp.get_enums()) - -.. seealso:: - - :meth:`.PGInspector.get_enums` - -.. _feature_2891: - -Postgresql Dialect reflects Materialized Views, Foreign Tables --------------------------------------------------------------- - -Changes are as follows: - -* the :class:`Table` construct with ``autoload=True`` will now match a name - that exists in the database as a materialized view or foriegn table. - -* :meth:`.Inspector.get_view_names` will return plain and materialized view - names. - -* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it - continues to return only the names of plain tables. - -* A new method :meth:`.PGInspector.get_foreign_table_names` is added which - will return the names of tables that are specifically marked as "foreign" - in the Postgresql schema tables. - -The change to reflection involves adding ``'m'`` and ``'f'`` to the list -of qualifiers we use when querying ``pg_class.relkind``, but this change -is new in 1.0.0 to avoid any backwards-incompatible surprises for those -running 0.9 in production. - -:ticket:`2891` - -.. _feature_gh134: - -Postgresql FILTER keyword -------------------------- - -The SQL standard FILTER keyword for aggregate functions is now supported -by Postgresql as of 9.4. SQLAlchemy allows this using -:meth:`.FunctionElement.filter`:: - - func.count(1).filter(True) - -.. 
seealso:: - - :meth:`.FunctionElement.filter` - - :class:`.FunctionFilter` - MySQL internal "no such table" exceptions not passed to event handlers ---------------------------------------------------------------------- -- cgit v1.2.1 From 119674dd250d7da8be92a262ad53396c2fac4e0f Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 6 Oct 2014 18:23:42 -0400 Subject: - The execution options passed to an :class:`.Engine` either via :paramref:`.create_engine.execution_options` or :meth:`.Engine.update_execution_options` are not passed to the special :class:`.Connection` used to initialize the dialect within the "first connect" event; dialects will usually perform their own queries in this phase, and none of the current available options should be applied here. In particular, the "autocommit" option was causing an attempt to autocommit within this initial connect which would fail with an AttributeError due to the non-standard state of the :class:`.Connection`. fixes #3200 --- doc/build/changelog/changelog_09.rst | 17 +++++++++++++++++ lib/sqlalchemy/engine/strategies.py | 1 + test/engine/test_execute.py | 8 ++++++++ 3 files changed, 26 insertions(+) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index e3d9175cb..7dd50739e 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,23 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, engine + :versions: 1.0.0 + :tickets: 3200 + + The execution options passed to an :class:`.Engine` either via + :paramref:`.create_engine.execution_options` or + :meth:`.Engine.update_execution_options` are not passed to the + special :class:`.Connection` used to initialize the dialect + within the "first connect" event; dialects will usually + perform their own queries in this phase, and none of the + current available options should be applied here. 
In + particular, the "autocommit" option was causing an attempt to + autocommit within this initial connect which would fail with + an AttributeError due to the non-standard state of the + :class:`.Connection`. + .. change:: :tags: bug, sqlite :versions: 1.0.0 diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 38206be89..49438372b 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -162,6 +162,7 @@ class DefaultEngineStrategy(EngineStrategy): def first_connect(dbapi_connection, connection_record): c = base.Connection(engine, connection=dbapi_connection, _has_events=False) + c._execution_options = {} dialect.initialize(c) event.listen(pool, 'first_connect', first_connect, once=True) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 219a145c6..a80d157ed 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -478,6 +478,14 @@ class ExecuteTest(fixtures.TestBase): eq_(canary, ["l1", "l2", "l3", "l1", "l2"]) + @testing.requires.ad_hoc_engines + def test_autocommit_option_no_issue_first_connect(self): + eng = create_engine(testing.db.url) + eng.update_execution_options(autocommit=True) + conn = eng.connect() + eq_(conn._execution_options, {"autocommit": True}) + conn.close() + @testing.requires.ad_hoc_engines def test_generative_engine_event_dispatch_hasevents(self): def l1(*arg, **kw): -- cgit v1.2.1 From 5a10b6a455f9ad7be752469cbaa503857ae9fda2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 6 Oct 2014 18:33:05 -0400 Subject: - Fixed "'NoneType' object has no attribute 'concrete'" error when using :class:`.AbstractConcreteBase` in conjunction with a subclass that declares ``__abstract__``. 
fixes #3185 --- doc/build/changelog/changelog_09.rst | 9 +++++++++ lib/sqlalchemy/ext/declarative/api.py | 2 +- test/ext/declarative/test_inheritance.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 7dd50739e..692c6e392 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,15 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, declarative, orm + :versions: 1.0.0 + :tickets: 3185 + + Fixed "'NoneType' object has no attribute 'concrete'" error + when using :class:`.AbstractConcreteBase` in conjunction with + a subclass that declares ``__abstract__``. + .. change:: :tags: bug, engine :versions: 1.0.0 diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index e84b21ad2..66fe05fd0 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -547,7 +547,7 @@ class AbstractConcreteBase(ConcreteBase): for scls in cls.__subclasses__(): sm = _mapper_or_none(scls) - if sm.concrete and cls in scls.__bases__: + if sm and sm.concrete and cls in scls.__bases__: sm._set_concrete_base(m) diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py index 5a99c9c5a..6ea37e4d3 100644 --- a/test/ext/declarative/test_inheritance.py +++ b/test/ext/declarative/test_inheritance.py @@ -1388,3 +1388,32 @@ class ConcreteExtensionConfigTest( "WHERE something.id = pjoin.something_id AND something.id = :id_1)" ) + def test_abstract_in_hierarchy(self): + class Document(Base, AbstractConcreteBase): + doctype = Column(String) + + class ContactDocument(Document): + __abstract__ = True + + send_method = Column(String) + + class ActualDocument(ContactDocument): + __tablename__ = 'actual_documents' + __mapper_args__ = { + 'concrete': True, + 'polymorphic_identity': 'actual'} + + id = Column(Integer, 
primary_key=True) + + configure_mappers() + session = Session() + self.assert_compile( + session.query(Document), + "SELECT pjoin.doctype AS pjoin_doctype, " + "pjoin.send_method AS pjoin_send_method, " + "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type " + "FROM (SELECT actual_documents.doctype AS doctype, " + "actual_documents.send_method AS send_method, " + "actual_documents.id AS id, 'actual' AS type " + "FROM actual_documents) AS pjoin" + ) \ No newline at end of file -- cgit v1.2.1 From 2885f78e4e20c2ae8552594ac7f0231b1bee4aad Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 7 Oct 2014 12:43:27 -0400 Subject: - adjustment for ref #3200 as we need an immutabledict() here so that union() can be called, in the case of a dialect that uses execution options inside of initialize() (e.g. oursql) --- lib/sqlalchemy/engine/strategies.py | 2 +- test/engine/test_execute.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 49438372b..398ef8df6 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -162,7 +162,7 @@ class DefaultEngineStrategy(EngineStrategy): def first_connect(dbapi_connection, connection_record): c = base.Connection(engine, connection=dbapi_connection, _has_events=False) - c._execution_options = {} + c._execution_options = util.immutabledict() dialect.initialize(c) event.listen(pool, 'first_connect', first_connect, once=True) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index a80d157ed..e0bba0afa 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -486,6 +486,18 @@ class ExecuteTest(fixtures.TestBase): eq_(conn._execution_options, {"autocommit": True}) conn.close() + @testing.requires.ad_hoc_engines + def test_dialect_init_uses_options(self): + eng = create_engine(testing.db.url) + + def my_init(connection): + 
connection.execution_options(foo='bar').execute(select([1])) + + with patch.object(eng.dialect, "initialize", my_init): + conn = eng.connect() + eq_(conn._execution_options, {}) + conn.close() + @testing.requires.ad_hoc_engines def test_generative_engine_event_dispatch_hasevents(self): def l1(*arg, **kw): -- cgit v1.2.1 From c55d10940b1953fce2129e7bcfe1728bea33cd1d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 7 Oct 2014 14:06:46 -0400 Subject: - clean up unicode docs and clarify that client_encoding at the engine level is not the same thing as at the connect args level. --- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 66 +++++++++++++++++++++----- 1 file changed, 53 insertions(+), 13 deletions(-) diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 9dfd53e22..1a2a1ffe4 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by way of enabling this mode on a per-execution basis. * ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode per connection. True by default. + + .. seealso:: + + :ref:`psycopg2_disable_native_unicode` + * ``isolation_level``: This option, available for all PostgreSQL dialects, includes the ``AUTOCOMMIT`` isolation level when using the psycopg2 - dialect. See :ref:`psycopg2_isolation_level`. + dialect. + + .. seealso:: + + :ref:`psycopg2_isolation_level` + +* ``client_encoding``: sets the client encoding in a libpq-agnostic way, + using psycopg2's ``set_client_encoding()`` method. + + .. seealso:: + :ref:`psycopg2_unicode` Unix Domain Connections ------------------------ @@ -75,8 +90,10 @@ The following DBAPI-specific options are respected when used with If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used. -Unicode -------- +.. 
_psycopg2_unicode: + +Unicode with Psycopg2 +---------------------- By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE`` extension, such that the DBAPI receives and returns all strings as Python @@ -84,27 +101,51 @@ Unicode objects directly - SQLAlchemy passes these values through without change. Psycopg2 here will encode/decode string values based on the current "client encoding" setting; by default this is the value in the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``. -Typically, this can be changed to ``utf-8``, as a more useful default:: +Typically, this can be changed to ``utf8``, as a more useful default:: + + # postgresql.conf file - #client_encoding = sql_ascii # actually, defaults to database + # client_encoding = sql_ascii # actually, defaults to database # encoding client_encoding = utf8 A second way to affect the client encoding is to set it within Psycopg2 -locally. SQLAlchemy will call psycopg2's ``set_client_encoding()`` -method (see: -http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding) +locally. SQLAlchemy will call psycopg2's +:meth:`psycopg2:connection.set_client_encoding` method on all new connections based on the value passed to :func:`.create_engine` using the ``client_encoding`` parameter:: + # set_client_encoding() setting; + # works for *all* Postgresql versions engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8') This overrides the encoding specified in the Postgresql client configuration. +When using the parameter in this way, the psycopg2 driver emits +``SET client_encoding TO 'utf8'`` on the connection explicitly, and works +in all Postgresql versions. + +Note that the ``client_encoding`` setting as passed to :func:`.create_engine` +is **not the same** as the more recently added ``client_encoding`` parameter +now supported by libpq directly. 
This is enabled when ``client_encoding`` +is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed +using the :paramref:`.create_engine.connect_args` parameter:: + + # libpq direct parameter setting; + # only works for Postgresql **9.1 and above** + engine = create_engine("postgresql://user:pass@host/dbname", + connect_args={'client_encoding': 'utf8'}) + + # using the query string is equivalent + engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8") + +The above parameter was only added to libpq as of version 9.1 of Postgresql, +so using the previous method is better for cross-version support. + +.. _psycopg2_disable_native_unicode: -.. versionadded:: 0.7.3 - The psycopg2-specific ``client_encoding`` parameter to - :func:`.create_engine`. +Disabling Native Unicode +^^^^^^^^^^^^^^^^^^^^^^^^ SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize its own unicode encode/decode @@ -116,8 +157,7 @@ in and coerce from bytes on the way back, using the value of the :func:`.create_engine` ``encoding`` parameter, which defaults to ``utf-8``. SQLAlchemy's own unicode encode/decode functionality is steadily becoming -obsolete as more DBAPIs support unicode fully along with the approach of -Python 3; in modern usage psycopg2 should be relied upon to handle unicode. +obsolete as most DBAPIs now support unicode fully. Transactions ------------ -- cgit v1.2.1 From 81d1e0455a406560be468d1aacc37aa63bb4d717 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Oct 2014 17:20:30 -0400 Subject: - Fixed bug where a fair number of SQL elements within the sql package would fail to ``__repr__()`` successfully, due to a missing ``description`` attribute that would then invoke a recursion overflow when an internal AttributeError would then re-invoke ``__repr__()``. 
fixes #3195 --- doc/build/changelog/changelog_09.rst | 11 +++++++++++ lib/sqlalchemy/sql/elements.py | 3 ++- test/sql/test_selectable.py | 24 ++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 692c6e392..e2b893d07 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,17 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, sql + :versions: 1.0.0 + :tickets: 3195 + + Fixed bug where a fair number of SQL elements within + the sql package would fail to ``__repr__()`` successfully, + due to a missing ``description`` attribute that would then invoke + a recursion overflow when an internal AttributeError would then + re-invoke ``__repr__()``. + .. change:: :tags: bug, declarative, orm :versions: 1.0.0 diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index db14031d2..c38d83106 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -228,6 +228,7 @@ class ClauseElement(Visitable): is_selectable = False is_clause_element = True + description = None _order_by_label_element = None _is_from_container = False @@ -540,7 +541,7 @@ class ClauseElement(Visitable): __nonzero__ = __bool__ def __repr__(self): - friendly = getattr(self, 'description', None) + friendly = self.description if friendly is None: return object.__repr__(self) else: diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py index a3b2b0e93..99d0cbe76 100644 --- a/test/sql/test_selectable.py +++ b/test/sql/test_selectable.py @@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_, assert_raises, \ from sqlalchemy import * from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \ AssertsExecutionResults +from sqlalchemy.sql import elements from sqlalchemy import testing from sqlalchemy.sql import util as sql_util, visitors, expression from sqlalchemy import exc @@ 
-1934,6 +1935,29 @@ class AnnotationsTest(fixtures.TestBase): assert (c2 == 5).left._annotations == {"foo": "bar", "bat": "hoho"} +class ReprTest(fixtures.TestBase): + def test_ensure_repr_elements(self): + for obj in [ + elements.Cast(1, 2), + elements.TypeClause(String()), + elements.ColumnClause('x'), + elements.BindParameter('q'), + elements.Null(), + elements.True_(), + elements.False_(), + elements.ClauseList(), + elements.BooleanClauseList.and_(), + elements.Tuple(), + elements.Case([]), + elements.Extract('foo', column('x')), + elements.UnaryExpression(column('x')), + elements.Grouping(column('x')), + elements.Over(func.foo()), + elements.Label('q', column('x')), + ]: + repr(obj) + + class WithLabelsTest(fixtures.TestBase): def _assert_labels_warning(self, s): -- cgit v1.2.1 From dd6389f171736abd28d777b6fde40403cab0c13e Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Oct 2014 20:40:35 -0400 Subject: - The ON clause rendered when using :meth:`.Query.join`, :meth:`.Query.outerjoin`, or the standalone :func:`.orm.join` / :func:`.orm.outerjoin` functions to a single-inheritance subclass will now include the "single table criteria" in the ON clause even if the ON clause is otherwise hand-rolled; it is now added to the criteria using AND, the same way as if joining to a single-table target using relationship or similar. fixes #3222 --- doc/build/changelog/changelog_10.rst | 14 ++++ lib/sqlalchemy/orm/util.py | 10 +++ test/orm/inheritance/test_single.py | 122 ++++++++++++++++++++++++++++++++++- 3 files changed, 144 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 69b5b29c1..60dea0564 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,20 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. 
change:: + :tags: bug, orm + :tickets: 3222 + + The ON clause rendered when using :meth:`.Query.join`, + :meth:`.Query.outerjoin`, or the standalone :func:`.orm.join` / + :func:`.orm.outerjoin` functions to a single-inheritance subclass will + now include the "single table criteria" in the ON clause even + if the ON clause is otherwise hand-rolled; it is now added to the + criteria using AND, the same way as if joining to a single-table + target using relationship or similar. + + This is sort of in-between feature and bug. + .. change:: :tags: feature, sql :tickets: 3184 diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 734f9d5e6..8d40ae21c 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -804,6 +804,16 @@ class _ORMJoin(expression.Join): expression.Join.__init__(self, left, right, onclause, isouter) + if not prop and getattr(right_info, 'mapper', None) \ + and right_info.mapper.single: + # if single inheritance target and we are using a manual + # or implicit ON clause, augment it the same way we'd augment the + # WHERE. 
+ single_crit = right_info.mapper._single_table_criterion + if right_info.is_aliased_class: + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit + def join(self, right, onclause=None, isouter=False, join_to_left=None): return _ORMJoin(self, right, onclause, isouter) diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index be42cce52..6112929b6 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -386,7 +386,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): ] ) - def test_outer_join(self): + def test_outer_join_prop(self): Company, Employee, Engineer = self.classes.Company,\ self.classes.Employee,\ self.classes.Engineer @@ -407,7 +407,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): "= employees.company_id AND employees.type IN (:type_1)" ) - def test_outer_join_alias(self): + def test_outer_join_prop_alias(self): Company, Employee, Engineer = self.classes.Company,\ self.classes.Employee,\ self.classes.Engineer @@ -431,6 +431,124 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): ) + def test_outer_join_literal_onclause(self): + Company, Employee, Engineer = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'engineers':relationship(Engineer) + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') + + sess = create_session() + self.assert_compile( + sess.query(Company, Engineer).outerjoin( + Engineer, Company.company_id == Engineer.company_id), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name, " + "employees.employee_id AS employees_employee_id, " + "employees.name AS 
employees_name, " + "employees.manager_data AS employees_manager_data, " + "employees.engineer_info AS employees_engineer_info, " + "employees.type AS employees_type, " + "employees.company_id AS employees_company_id FROM companies " + "LEFT OUTER JOIN employees ON " + "companies.company_id = employees.company_id " + "AND employees.type IN (:type_1)" + ) + + def test_outer_join_literal_onclause_alias(self): + Company, Employee, Engineer = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'engineers':relationship(Engineer) + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') + + eng_alias = aliased(Engineer) + sess = create_session() + self.assert_compile( + sess.query(Company, eng_alias).outerjoin( + eng_alias, Company.company_id == eng_alias.company_id), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name, " + "employees_1.employee_id AS employees_1_employee_id, " + "employees_1.name AS employees_1_name, " + "employees_1.manager_data AS employees_1_manager_data, " + "employees_1.engineer_info AS employees_1_engineer_info, " + "employees_1.type AS employees_1_type, " + "employees_1.company_id AS employees_1_company_id " + "FROM companies LEFT OUTER JOIN employees AS employees_1 ON " + "companies.company_id = employees_1.company_id " + "AND employees_1.type IN (:type_1)" + ) + + def test_outer_join_no_onclause(self): + Company, Employee, Engineer = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'engineers':relationship(Engineer) + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') 
+ + sess = create_session() + self.assert_compile( + sess.query(Company, Engineer).outerjoin( + Engineer), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name, " + "employees.employee_id AS employees_employee_id, " + "employees.name AS employees_name, " + "employees.manager_data AS employees_manager_data, " + "employees.engineer_info AS employees_engineer_info, " + "employees.type AS employees_type, " + "employees.company_id AS employees_company_id " + "FROM companies LEFT OUTER JOIN employees ON " + "companies.company_id = employees.company_id " + "AND employees.type IN (:type_1)" + ) + + def test_outer_join_no_onclause_alias(self): + Company, Employee, Engineer = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'engineers':relationship(Engineer) + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') + + eng_alias = aliased(Engineer) + sess = create_session() + self.assert_compile( + sess.query(Company, eng_alias).outerjoin( + eng_alias), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name, " + "employees_1.employee_id AS employees_1_employee_id, " + "employees_1.name AS employees_1_name, " + "employees_1.manager_data AS employees_1_manager_data, " + "employees_1.engineer_info AS employees_1_engineer_info, " + "employees_1.type AS employees_1_type, " + "employees_1.company_id AS employees_1_company_id " + "FROM companies LEFT OUTER JOIN employees AS employees_1 ON " + "companies.company_id = employees_1.company_id " + "AND employees_1.type IN (:type_1)" + ) + def test_relationship_to_subclass(self): JuniorEngineer, Company, companies, Manager, \ Employee, employees, Engineer = (self.classes.JuniorEngineer, -- cgit v1.2.1 From 
be8d361be2863f1f6f71ba6269cce8594c19179c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 9 Oct 2014 20:54:19 -0400 Subject: - add a migration note for ref #3222 --- doc/build/changelog/changelog_10.rst | 4 +++ doc/build/changelog/migration_10.rst | 61 ++++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 60dea0564..5b0362c44 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -35,6 +35,10 @@ This is sort of in-between feature and bug. + .. seealso:: + + :ref:`migration_3222` + .. change:: :tags: feature, sql :tickets: 3184 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index b3e5f6f15..a3b0c0308 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -576,6 +576,67 @@ Renders:: :ticket:`3177` +.. _migration_3222: + + +single-table-inheritance criteria added to all ON clauses unconditionally +------------------------------------------------------------------------- + +When joining to a single-table inheritance subclass target, the ORM always adds +the "single table criteria" when joining on a relationship. 
Given a +mapping as:: + + class Widget(Base): + __tablename__ = 'widget' + id = Column(Integer, primary_key=True) + type = Column(String) + related_id = Column(ForeignKey('related.id')) + related = relationship("Related", backref="widget") + __mapper_args__ = {'polymorphic_on': type} + + + class FooWidget(Widget): + __mapper_args__ = {'polymorphic_identity': 'foo'} + + + class Related(Base): + __tablename__ = 'related' + id = Column(Integer, primary_key=True) + +It's been the behavior for quite some time that a JOIN on the relationship +will render a "single inheritance" clause for the type:: + + s.query(Related).join(FooWidget, Related.widget).all() + +SQL output:: + + SELECT related.id AS related_id + FROM related JOIN widget ON related.id = widget.related_id AND widget.type IN (:type_1) + +Above, because we joined to a subclass ``FooWidget``, :meth:`.Query.join` +knew to add the ``AND widget.type IN ('foo')`` criteria to the ON clause. + +The change here is that the ``AND widget.type IN()`` criteria is now appended +to *any* ON clause, not just those generated from a relationship, +including one that is explicitly stated:: + + # ON clause will now render as + # related.id = widget.related_id AND widget.type IN (:type_1) + s.query(Related).join(FooWidget, FooWidget.related_id == Related.id).all() + +As well as the "implicit" join when no ON clause of any kind is stated:: + + # ON clause will now render as + # related.id = widget.related_id AND widget.type IN (:type_1) + s.query(Related).join(FooWidget).all() + +Previously, the ON clause for these would not include the single-inheritance +criteria. Applications that are already adding this criteria to work around +this will want to remove its explicit use, though it should continue to work +fine if the criteria happens to be rendered twice in the meantime. + +:ticket:`3222` + .. 
_bug_3188: ColumnProperty constructs work a lot better with aliases, order_by -- cgit v1.2.1 From 3a6cdff88429e047a684c0f5d6029a30d9aaa062 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 10 Oct 2014 15:31:07 -0400 Subject: - apply patches to correct for + sign in with_hint() docs, fixes #3036 --- lib/sqlalchemy/sql/selectable.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index b4df87e54..8198a6733 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -2572,7 +2572,7 @@ class Select(HasPrefixes, GenerativeSelect): following:: select([mytable]).\\ - with_hint(mytable, "+ index(%(name)s ix_mytable)") + with_hint(mytable, "index(%(name)s ix_mytable)") Would render SQL as:: @@ -2583,8 +2583,7 @@ class Select(HasPrefixes, GenerativeSelect): and Sybase simultaneously:: select([mytable]).\\ - with_hint( - mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\ + with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\ with_hint(mytable, "WITH INDEX ix_mytable", 'sybase') .. seealso:: -- cgit v1.2.1 From 95be42c06ff4e5f3528de42bb04dcba228ea74c2 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 10 Oct 2014 17:15:19 -0400 Subject: - :meth:`.Insert.from_select` now includes Python and SQL-expression defaults if otherwise unspecified; the limitation where non- server column defaults aren't included in an INSERT FROM SELECT is now lifted and these expressions are rendered as constants into the SELECT statement. 
--- doc/build/changelog/changelog_10.rst | 13 ++++ doc/build/changelog/migration_10.rst | 30 +++++++++ doc/build/core/defaults.rst | 4 +- lib/sqlalchemy/sql/compiler.py | 2 +- lib/sqlalchemy/sql/crud.py | 97 +++++++++++++++++++++++------ lib/sqlalchemy/sql/dml.py | 26 +++++--- lib/sqlalchemy/testing/suite/test_insert.py | 37 ++++++++++- test/sql/test_defaults.py | 65 +++++++++++++++++++ test/sql/test_insert.py | 82 +++++++++++++++++++++++- 9 files changed, 323 insertions(+), 33 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 5b0362c44..3d471f192 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,19 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, sql + + :meth:`.Insert.from_select` now includes Python and SQL-expression + defaults if otherwise unspecified; the limitation where non- + server column defaults aren't included in an INSERT FROM + SELECT is now lifted and these expressions are rendered as + constants into the SELECT statement. + + .. seealso:: + + :ref:`feature_insert_from_select_defaults` + .. change:: :tags: bug, orm :tickets: 3222 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index a3b0c0308..951e39603 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -187,6 +187,36 @@ than the integer value. .. _change_2051: +.. 
_feature_insert_from_select_defaults: + +INSERT FROM SELECT now includes Python and SQL-expression defaults +------------------------------------------------------------------- + +:meth:`.Insert.from_select` now includes Python and SQL-expression defaults if +otherwise unspecified; the limitation where non-server column defaults +aren't included in an INSERT FROM SELECT is now lifted and these +expressions are rendered as constants into the SELECT statement:: + + from sqlalchemy import Table, Column, MetaData, Integer, select, func + + m = MetaData() + + t = Table( + 't', m, + Column('x', Integer), + Column('y', Integer, default=func.somefunction())) + + stmt = select([t.c.x]) + print t.insert().from_select(['x'], stmt) + +Will render:: + + INSERT INTO t (x, y) SELECT t.x, somefunction() AS somefunction_1 + FROM t + +The feature can be disabled using +:paramref:`.Insert.from_select.include_defaults`. + New Postgresql Table options ----------------------------- diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst index 166273c18..1d55cd6c6 100644 --- a/doc/build/core/defaults.rst +++ b/doc/build/core/defaults.rst @@ -1,6 +1,8 @@ +.. module:: sqlalchemy.schema + .. _metadata_defaults_toplevel: + .. _metadata_defaults: -.. 
module:: sqlalchemy.schema Column Insert/Update Defaults ============================== diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 86f00d944..a6c30b7dc 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1793,7 +1793,7 @@ class SQLCompiler(Compiled): text += " " + returning_clause if insert_stmt.select is not None: - text += " %s" % self.process(insert_stmt.select, **kw) + text += " %s" % self.process(self._insert_from_select, **kw) elif not crud_params and supports_default_values: text += " DEFAULT VALUES" elif insert_stmt._has_multi_parameters: diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py index 1c1f661d2..831d05be1 100644 --- a/lib/sqlalchemy/sql/crud.py +++ b/lib/sqlalchemy/sql/crud.py @@ -89,18 +89,15 @@ def _get_crud_params(compiler, stmt, **kw): _col_bind_name, _getattr_col_key, values, kw) if compiler.isinsert and stmt.select_names: - # for an insert from select, we can only use names that - # are given, so only select for those names. 
- cols = (stmt.table.c[_column_as_key(name)] - for name in stmt.select_names) + _scan_insert_from_select_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) else: - # iterate through all table columns to maintain - # ordering, even for those cols that aren't included - cols = stmt.table.columns - - _scan_cols( - compiler, stmt, cols, parameters, - _getattr_col_key, _col_bind_name, check_columns, values, kw) + _scan_cols( + compiler, stmt, parameters, + _getattr_col_key, _column_as_key, + _col_bind_name, check_columns, values, kw) if parameters and stmt_parameters: check = set(parameters).intersection( @@ -118,13 +115,17 @@ def _get_crud_params(compiler, stmt, **kw): return values -def _create_bind_param(compiler, col, value, required=False, name=None): +def _create_bind_param( + compiler, col, value, process=True, required=False, name=None): if name is None: name = col.key bindparam = elements.BindParameter(name, value, type_=col.type, required=required) bindparam._is_crud = True - return bindparam._compiler_dispatch(compiler) + if process: + bindparam = bindparam._compiler_dispatch(compiler) + return bindparam + def _key_getters_for_crud_column(compiler): if compiler.isupdate and compiler.statement._extra_froms: @@ -162,14 +163,52 @@ def _key_getters_for_crud_column(compiler): return _column_as_key, _getattr_col_key, _col_bind_name +def _scan_insert_from_select_cols( + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): + + need_pks, implicit_returning, \ + implicit_return_defaults, postfetch_lastrowid = \ + _get_returning_modifiers(compiler, stmt) + + cols = [stmt.table.c[_column_as_key(name)] + for name in stmt.select_names] + + compiler._insert_from_select = stmt.select + + add_select_cols = [] + if stmt.include_insert_from_select_defaults: + col_set = set(cols) + for col in stmt.table.columns: + if col not in col_set and col.default: + 
cols.append(col) + + for c in cols: + col_key = _getattr_col_key(c) + if col_key in parameters and col_key not in check_columns: + parameters.pop(col_key) + values.append((c, None)) + else: + _append_param_insert_select_hasdefault( + compiler, stmt, c, add_select_cols, kw) + + if add_select_cols: + values.extend(add_select_cols) + compiler._insert_from_select = compiler._insert_from_select._generate() + compiler._insert_from_select._raw_columns += tuple( + expr for col, expr in add_select_cols) + + def _scan_cols( - compiler, stmt, cols, parameters, _getattr_col_key, - _col_bind_name, check_columns, values, kw): + compiler, stmt, parameters, _getattr_col_key, + _column_as_key, _col_bind_name, check_columns, values, kw): need_pks, implicit_returning, \ implicit_return_defaults, postfetch_lastrowid = \ _get_returning_modifiers(compiler, stmt) + cols = stmt.table.columns + for c in cols: col_key = _getattr_col_key(c) if col_key in parameters and col_key not in check_columns: @@ -196,7 +235,8 @@ def _scan_cols( elif c.default is not None: _append_param_insert_hasdefault( - compiler, stmt, c, implicit_return_defaults, values, kw) + compiler, stmt, c, implicit_return_defaults, + values, kw) elif c.server_default is not None: if implicit_return_defaults and \ @@ -299,10 +339,8 @@ def _append_param_insert_hasdefault( elif not c.primary_key: compiler.postfetch.append(c) elif c.default.is_clause_element: - values.append( - (c, compiler.process( - c.default.arg.self_group(), **kw)) - ) + proc = compiler.process(c.default.arg.self_group(), **kw) + values.append((c, proc)) if implicit_return_defaults and \ c in implicit_return_defaults: @@ -317,6 +355,25 @@ def _append_param_insert_hasdefault( compiler.prefetch.append(c) +def _append_param_insert_select_hasdefault( + compiler, stmt, c, values, kw): + + if c.default.is_sequence: + if compiler.dialect.supports_sequences and \ + (not c.default.optional or + not compiler.dialect.sequences_optional): + proc = c.default + 
values.append((c, proc)) + elif c.default.is_clause_element: + proc = c.default.arg.self_group() + values.append((c, proc)) + else: + values.append( + (c, _create_bind_param(compiler, c, None, process=False)) + ) + compiler.prefetch.append(c) + + def _append_param_update( compiler, stmt, c, implicit_return_defaults, values, kw): diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 1934d0776..9f2ce7ce3 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -475,6 +475,7 @@ class Insert(ValuesBase): ValuesBase.__init__(self, table, values, prefixes) self._bind = bind self.select = self.select_names = None + self.include_insert_from_select_defaults = False self.inline = inline self._returning = returning self._validate_dialect_kwargs(dialect_kw) @@ -487,7 +488,7 @@ class Insert(ValuesBase): return () @_generative - def from_select(self, names, select): + def from_select(self, names, select, include_defaults=True): """Return a new :class:`.Insert` construct which represents an ``INSERT...FROM SELECT`` statement. @@ -506,6 +507,21 @@ class Insert(ValuesBase): is not checked before passing along to the database, the database would normally raise an exception if these column lists don't correspond. + :param include_defaults: if True, non-server default values and + SQL expressions as specified on :class:`.Column` objects + (as documented in :ref:`metadata_defaults_toplevel`) not + otherwise specified in the list of names will be rendered + into the INSERT and SELECT statements, so that these values are also + included in the data to be inserted. + + .. note:: A Python-side default that uses a Python callable function + will only be invoked **once** for the whole statement, and **not + per row**. + + .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders + Python-side and SQL expression column defaults into the + SELECT statement for columns otherwise not included in the + list of column names. .. 
versionchanged:: 1.0.0 an INSERT that uses FROM SELECT implies that the :paramref:`.insert.inline` flag is set to @@ -514,13 +530,6 @@ class Insert(ValuesBase): deals with an arbitrary number of rows, so the :attr:`.ResultProxy.inserted_primary_key` accessor does not apply. - .. note:: - - A SELECT..INSERT construct in SQL has no VALUES clause. Therefore - :class:`.Column` objects which utilize Python-side defaults - (e.g. as described at :ref:`metadata_defaults_toplevel`) - will **not** take effect when using :meth:`.Insert.from_select`. - .. versionadded:: 0.8.3 """ @@ -533,6 +542,7 @@ class Insert(ValuesBase): self.select_names = names self.inline = True + self.include_insert_from_select_defaults = include_defaults self.select = _interpret_as_select(select) def _copy_internals(self, clone=_clone, **kw): diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 92d3d93e5..c197145c7 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -4,7 +4,7 @@ from .. import exclusions from ..assertions import eq_ from .. 
import engines -from sqlalchemy import Integer, String, select, util +from sqlalchemy import Integer, String, select, literal_column from ..schema import Table, Column @@ -90,6 +90,13 @@ class InsertBehaviorTest(fixtures.TablesTest): Column('id', Integer, primary_key=True, autoincrement=False), Column('data', String(50)) ) + Table('includes_defaults', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)), + Column('x', Integer, default=5), + Column('y', Integer, + default=literal_column("2", type_=Integer) + 2)) def test_autoclose_on_insert(self): if requirements.returning.enabled: @@ -158,6 +165,34 @@ class InsertBehaviorTest(fixtures.TablesTest): ("data3", ), ("data3", )] ) + @requirements.insert_from_select + def test_insert_from_select_with_defaults(self): + table = self.tables.includes_defaults + config.db.execute( + table.insert(), + [ + dict(id=1, data="data1"), + dict(id=2, data="data2"), + dict(id=3, data="data3"), + ] + ) + + config.db.execute( + table.insert(inline=True). + from_select(("id", "data",), + select([table.c.id + 5, table.c.data]). 
+ where(table.c.data.in_(["data2", "data3"])) + ), + ) + + eq_( + config.db.execute( + select([table]).order_by(table.c.data) + ).fetchall(), + [(1, 'data1', 5, 4), (2, 'data2', 5, 4), + (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)] + ) + class ReturningTest(fixtures.TablesTest): run_create_tables = 'each' diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index abce600df..10e557b76 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -14,6 +14,7 @@ from sqlalchemy.dialects import sqlite from sqlalchemy.testing import fixtures from sqlalchemy.util import u, b from sqlalchemy import util +import itertools t = f = f2 = ts = currenttime = metadata = default_generator = None @@ -1278,3 +1279,67 @@ class UnicodeDefaultsTest(fixtures.TestBase): "foobar", Unicode(32), default=default ) + + +class InsertFromSelectTest(fixtures.TestBase): + __backend__ = True + + def _fixture(self): + data = Table( + 'data', self.metadata, + Column('x', Integer), + Column('y', Integer) + ) + data.create() + testing.db.execute(data.insert(), {'x': 2, 'y': 5}, {'x': 7, 'y': 12}) + return data + + @testing.provide_metadata + def test_insert_from_select_override_defaults(self): + data = self._fixture() + + table = Table('sometable', self.metadata, + Column('x', Integer), + Column('foo', Integer, default=12), + Column('y', Integer)) + + table.create() + + sel = select([data.c.x, data.c.y]) + + ins = table.insert().\ + from_select(["x", "y"], sel) + testing.db.execute(ins) + + eq_( + testing.db.execute(table.select().order_by(table.c.x)).fetchall(), + [(2, 12, 5), (7, 12, 12)] + ) + + @testing.provide_metadata + def test_insert_from_select_fn_defaults(self): + data = self._fixture() + + counter = itertools.count(1) + + def foo(ctx): + return next(counter) + + table = Table('sometable', self.metadata, + Column('x', Integer), + Column('foo', Integer, default=foo), + Column('y', Integer)) + + table.create() + + sel = select([data.c.x, data.c.y]) + + 
ins = table.insert().\ + from_select(["x", "y"], sel) + testing.db.execute(ins) + + # counter is only called once! + eq_( + testing.db.execute(table.select().order_by(table.c.x)).fetchall(), + [(2, 1, 5), (7, 1, 12)] + ) diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py index 232c5758b..bd4eaa3e2 100644 --- a/test/sql/test_insert.py +++ b/test/sql/test_insert.py @@ -183,7 +183,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL): checkparams={"name_1": "foo"} ) - def test_insert_from_select_select_no_defaults(self): + def test_insert_from_select_no_defaults(self): metadata = MetaData() table = Table('sometable', metadata, Column('id', Integer, primary_key=True), @@ -191,7 +191,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL): table1 = self.tables.mytable sel = select([table1.c.myid]).where(table1.c.name == 'foo') ins = table.insert().\ - from_select(["id"], sel) + from_select(["id"], sel, include_defaults=False) self.assert_compile( ins, "INSERT INTO sometable (id) SELECT mytable.myid " @@ -199,6 +199,84 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL): checkparams={"name_1": "foo"} ) + def test_insert_from_select_with_sql_defaults(self): + metadata = MetaData() + table = Table('sometable', metadata, + Column('id', Integer, primary_key=True), + Column('foo', Integer, default=func.foobar())) + table1 = self.tables.mytable + sel = select([table1.c.myid]).where(table1.c.name == 'foo') + ins = table.insert().\ + from_select(["id"], sel) + self.assert_compile( + ins, + "INSERT INTO sometable (id, foo) SELECT " + "mytable.myid, foobar() AS foobar_1 " + "FROM mytable WHERE mytable.name = :name_1", + checkparams={"name_1": "foo"} + ) + + def test_insert_from_select_with_python_defaults(self): + metadata = MetaData() + table = Table('sometable', metadata, + Column('id', Integer, primary_key=True), + Column('foo', Integer, default=12)) + table1 = self.tables.mytable + sel = 
select([table1.c.myid]).where(table1.c.name == 'foo') + ins = table.insert().\ + from_select(["id"], sel) + self.assert_compile( + ins, + "INSERT INTO sometable (id, foo) SELECT " + "mytable.myid, :foo AS anon_1 " + "FROM mytable WHERE mytable.name = :name_1", + # value filled in at execution time + checkparams={"name_1": "foo", "foo": None} + ) + + def test_insert_from_select_override_defaults(self): + metadata = MetaData() + table = Table('sometable', metadata, + Column('id', Integer, primary_key=True), + Column('foo', Integer, default=12)) + table1 = self.tables.mytable + sel = select( + [table1.c.myid, table1.c.myid.label('q')]).where( + table1.c.name == 'foo') + ins = table.insert().\ + from_select(["id", "foo"], sel) + self.assert_compile( + ins, + "INSERT INTO sometable (id, foo) SELECT " + "mytable.myid, mytable.myid AS q " + "FROM mytable WHERE mytable.name = :name_1", + checkparams={"name_1": "foo"} + ) + + def test_insert_from_select_fn_defaults(self): + metadata = MetaData() + + def foo(ctx): + return 12 + + table = Table('sometable', metadata, + Column('id', Integer, primary_key=True), + Column('foo', Integer, default=foo)) + table1 = self.tables.mytable + sel = select( + [table1.c.myid]).where( + table1.c.name == 'foo') + ins = table.insert().\ + from_select(["id"], sel) + self.assert_compile( + ins, + "INSERT INTO sometable (id, foo) SELECT " + "mytable.myid, :foo AS anon_1 " + "FROM mytable WHERE mytable.name = :name_1", + # value filled in at execution time + checkparams={"name_1": "foo", "foo": None} + ) + def test_insert_mix_select_values_exception(self): table1 = self.tables.mytable sel = select([table1.c.myid, table1.c.name]).where( -- cgit v1.2.1 From 3e810771a8ace7351fcd43deaf55891fd1c30a53 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 11 Oct 2014 17:33:44 -0400 Subject: - change this literal so that the bound name doesn't have a numeric name, this is sort of a bug for oracle --- lib/sqlalchemy/testing/suite/test_insert.py | 4 ++-- 1 
file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index c197145c7..2334d3049 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -4,7 +4,7 @@ from .. import exclusions from ..assertions import eq_ from .. import engines -from sqlalchemy import Integer, String, select, literal_column +from sqlalchemy import Integer, String, select, literal_column, literal from ..schema import Table, Column @@ -96,7 +96,7 @@ class InsertBehaviorTest(fixtures.TablesTest): Column('data', String(50)), Column('x', Integer, default=5), Column('y', Integer, - default=literal_column("2", type_=Integer) + 2)) + default=literal_column("2", type_=Integer) + literal(2))) def test_autoclose_on_insert(self): if requirements.returning.enabled: -- cgit v1.2.1 From cc29c4afff20dd251dbc045a490da9942f98b1bf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 11 Oct 2014 18:25:21 -0400 Subject: - Fixed long-standing bug in Oracle dialect where bound parameter names that started with numbers would not be quoted, as Oracle doesn't like numerics in bound parameter names. fixes #2138 --- doc/build/changelog/changelog_09.rst | 9 +++++++++ lib/sqlalchemy/dialects/oracle/base.py | 3 ++- test/dialect/test_oracle.py | 22 ++++++++++++++++++++++ 3 files changed, 33 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index e2b893d07..fdc83c767 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,15 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, oracle + :versions: 1.0.0 + :tickets: 2138 + + Fixed long-standing bug in Oracle dialect where bound parameter + names that started with numbers would not be quoted, as Oracle + doesn't like numerics in bound parameter names. + .. 
change:: :tags: bug, sql :versions: 1.0.0 diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 837a498fb..6df38e57e 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -813,7 +813,8 @@ class OracleDDLCompiler(compiler.DDLCompiler): class OracleIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([x.lower() for x in RESERVED_WORDS]) - illegal_initial_characters = set(range(0, 10)).union(["_", "$"]) + illegal_initial_characters = set( + (str(dig) for dig in range(0, 10))).union(["_", "$"]) def _bindparam_requires_quotes(self, value): """Return True if the given identifier requires quoting.""" diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py index 36eacf864..a771c5d80 100644 --- a/test/dialect/test_oracle.py +++ b/test/dialect/test_oracle.py @@ -104,6 +104,28 @@ class QuotedBindRoundTripTest(fixtures.TestBase): (2, 2, 2) ) + def test_numeric_bind_round_trip(self): + eq_( + testing.db.scalar( + select([ + literal_column("2", type_=Integer()) + + bindparam("2_1", value=2)]) + ), + 4 + ) + + @testing.provide_metadata + def test_numeric_bind_in_crud(self): + t = Table( + "asfd", self.metadata, + Column("100K", Integer) + ) + t.create() + + testing.db.execute(t.insert(), {"100K": 10}) + eq_( + testing.db.scalar(t.select()), 10 + ) class CompileTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = "oracle" #oracle.dialect() -- cgit v1.2.1 From 83e465633793e4a6d76e41b12fb92d7cc4bbddf3 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 11 Oct 2014 18:35:12 -0400 Subject: - embedding an existing predicate into a new one only seems to be used by test_oracle->test_coerce_to_unicode(). The predicate here should treat as a lambda based on enabled_for_config. 
not sure why this test is not failing on jenkins --- lib/sqlalchemy/testing/exclusions.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 49211f805..f94724608 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -178,8 +178,7 @@ class Predicate(object): @classmethod def as_predicate(cls, predicate, description=None): if isinstance(predicate, compound): - return cls.as_predicate(predicate.fails.union(predicate.skips)) - + return cls.as_predicate(predicate.enabled_for_config, description) elif isinstance(predicate, Predicate): if description and predicate.description is None: predicate.description = description -- cgit v1.2.1 From 216b88894d95c17a1bd18b9d574e96530fb6f1cb Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 11 Oct 2014 19:02:32 -0400 Subject: add more order by here --- lib/sqlalchemy/testing/suite/test_insert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 2334d3049..38519dfb9 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -187,7 +187,7 @@ class InsertBehaviorTest(fixtures.TablesTest): eq_( config.db.execute( - select([table]).order_by(table.c.data) + select([table]).order_by(table.c.data, table.c.id) ).fetchall(), [(1, 'data1', 5, 4), (2, 'data2', 5, 4), (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)] -- cgit v1.2.1 From 6d5c03001cd81d9ad5cee2459f222a6319f5b956 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 12 Oct 2014 09:54:44 -0400 Subject: - roll back part of pr 140 to get exception wrapping working again --- lib/sqlalchemy/exc.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 5d35dc2e7..8e65ef07b 100644 --- 
a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -280,9 +280,13 @@ class DBAPIError(StatementError): connection_invalidated=False): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. - if (isinstance(orig, BaseException) and - not isinstance(orig, Exception)) or \ - isinstance(orig, DontWrapMixin): + if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)): + + # TODO: shouldn't it work this way? see if we can get this + # to work in py3k + #if (isinstance(orig, BaseException) and + # not isinstance(orig, Exception)) or \ + # isinstance(orig, DontWrapMixin): return orig if orig is not None: -- cgit v1.2.1 From 0426d174e4a608cb09878fe18185b2ae853243ad Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 12 Oct 2014 19:37:14 -0400 Subject: - the test_except test was doing an unnecessary workaround of some kind, take that out, restore the better exception logic in exc --- lib/sqlalchemy/exc.py | 10 +++------- test/base/test_except.py | 9 +-------- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 8e65ef07b..5d35dc2e7 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -280,13 +280,9 @@ class DBAPIError(StatementError): connection_invalidated=False): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. - if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)): - - # TODO: shouldn't it work this way? 
see if we can get this - # to work in py3k - #if (isinstance(orig, BaseException) and - # not isinstance(orig, Exception)) or \ - # isinstance(orig, DontWrapMixin): + if (isinstance(orig, BaseException) and + not isinstance(orig, Exception)) or \ + isinstance(orig, DontWrapMixin): return orig if orig is not None: diff --git a/test/base/test_except.py b/test/base/test_except.py index a62382725..359473c54 100644 --- a/test/base/test_except.py +++ b/test/base/test_except.py @@ -2,19 +2,12 @@ from sqlalchemy import exc as sa_exceptions -from sqlalchemy import util from sqlalchemy.testing import fixtures from sqlalchemy.testing import eq_ -if util.py2k: - from exceptions import StandardError, KeyboardInterrupt, SystemExit -else: - Exception = BaseException - class Error(Exception): - """This class will be old-style on <= 2.4 and new-style on >= - 2.5.""" + pass class DatabaseError(Error): -- cgit v1.2.1 From 50d2432a9efa65c9798ef207e3f887cb5c0071e1 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 12 Oct 2014 20:14:32 -0400 Subject: - Mysqlconnector as of version 2.0, probably as a side effect of the python 3 merge, now does not expect percent signs (e.g. as used as the modulus operator and others) to be doubled, even when using the "pyformat" bound parameter format (this change is not documented by Mysqlconnector). The dialect now checks for py2k and for mysqlconnector less than version 2.0 when detecting if the modulus operator should be rendered as ``%%`` or ``%``. - Unicode SQL is now passed for MySQLconnector version 2.0 and above; for Py2k and MySQL < 2.0, strings are encoded. Note that mysqlconnector as of 2.0.1 appears to have a bug with unicode DDL on py2k, so the tests here are skipping until we observe it's fixed. 
- take out profiling on mysqlconnector, callcounts vary too much with its current development speed --- doc/build/changelog/changelog_09.rst | 21 ++++ lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 46 +++++++- test/profiles.txt | 146 +++++++++--------------- test/requirements.py | 6 + test/sql/test_query.py | 6 - 5 files changed, 123 insertions(+), 102 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index fdc83c767..fd36240f8 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,27 @@ .. changelog:: :version: 0.9.8 + .. change:: + :tags: bug, mysql, mysqlconnector + :versions: 1.0.0 + + Mysqlconnector as of version 2.0, probably as a side effect of + the python 3 merge, now does not expect percent signs (e.g. + as used as the modulus operator and others) to be doubled, + even when using the "pyformat" bound parameter format (this + change is not documented by Mysqlconnector). The dialect now + checks for py2k and for mysqlconnector less than version 2.0 + when detecting if the modulus operator should be rendered as + ``%%`` or ``%``. + + .. change:: + :tags: bug, mysql, mysqlconnector + :versions: 1.0.0 + + Unicode SQL is now passed for MySQLconnector version 2.0 and above; + for Py2k and MySQL < 2.0, strings are encoded. + + .. change:: :tags: bug, oracle :versions: 1.0.0 diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 6077ce53e..417e1ad6f 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -21,6 +21,7 @@ from .base import (MySQLDialect, MySQLExecutionContext, BIT) from ... 
import util +import re class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): @@ -31,18 +32,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): class MySQLCompiler_mysqlconnector(MySQLCompiler): def visit_mod_binary(self, binary, operator, **kw): - return self.process(binary.left, **kw) + " %% " + \ - self.process(binary.right, **kw) + if self.dialect._mysqlconnector_double_percents: + return self.process(binary.left, **kw) + " %% " + \ + self.process(binary.right, **kw) + else: + return self.process(binary.left, **kw) + " % " + \ + self.process(binary.right, **kw) def post_process_text(self, text): - return text.replace('%', '%%') + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text + + def escape_literal_column(self, text): + if self.dialect._mysqlconnector_double_percents: + return text.replace('%', '%%') + else: + return text class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): def _escape_identifier(self, value): value = value.replace(self.escape_quote, self.escape_to_quote) - return value.replace("%", "%%") + if self.dialect._mysqlconnector_double_percents: + return value.replace("%", "%%") + else: + return value class _myconnpyBIT(BIT): @@ -55,8 +72,6 @@ class _myconnpyBIT(BIT): class MySQLDialect_mysqlconnector(MySQLDialect): driver = 'mysqlconnector' - if util.py2k: - supports_unicode_statements = False supports_unicode_binds = True supports_sane_rowcount = True @@ -77,6 +92,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect): } ) + @util.memoized_property + def supports_unicode_statements(self): + return util.py3k or self._mysqlconnector_version_info > (2, 0) + @classmethod def dbapi(cls): from mysql import connector @@ -107,6 +126,21 @@ class MySQLDialect_mysqlconnector(MySQLDialect): pass return [[], opts] + @util.memoized_property + def _mysqlconnector_version_info(self): + if self.dbapi and hasattr(self.dbapi, '__version__'): + m = 
re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', + self.dbapi.__version__) + if m: + return tuple( + int(x) + for x in m.group(1, 2, 3) + if x is not None) + + @util.memoized_property + def _mysqlconnector_double_percents(self): + return not util.py3k and self._mysqlconnector_version_info < (2, 0) + def _get_server_version_info(self, connection): dbapi_con = connection.connection version = dbapi_con.get_server_version() diff --git a/test/profiles.txt b/test/profiles.txt index 12222b637..dc4d05264 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -1,34 +1,28 @@ # /Users/classic/dev/sqlalchemy/test/profiles.txt # This file is written out on a per-environment basis. -# For each test in aaa_profiling, the corresponding function and +# For each test in aaa_profiling, the corresponding function and # environment is located within this file. If it doesn't exist, # the test is skipped. -# If a callcount does exist, it is compared to what we received. +# If a callcount does exist, it is compared to what we received. # assertions are raised if the counts do not match. -# -# To add a new callcount test, apply the function_call_count -# decorator and re-run the tests using the --write-profiles +# +# To add a new callcount test, apply the function_call_count +# decorator and re-run the tests using the --write-profiles # option - this file will be rewritten including the new count. 
-# +# # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert -test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 74 -test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74 -test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_cextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77 @@ -36,22 +30,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc # 
TEST: test.aaa_profiling.test_compiler.CompileTest.test_select -test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 152 -test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 152 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 165 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_cextensions 165 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_nocextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 165 test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 165 @@ -59,22 +47,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 
3.4_sqlite_pysqlite_noc # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 186 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 186 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 199 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_cextensions 199 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_nocextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 199 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 199 
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 199 @@ -82,22 +64,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysql # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update -test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 79 -test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 79 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 79 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 79 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 80 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 80 test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78 test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78 test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78 test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_cextensions 80 -test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_nocextensions 80 test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78 test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78 
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78 @@ -105,22 +81,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 148 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 148 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_cextensions 148 -test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 
3.4_mysql_mysqlconnector_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 148 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 148 @@ -134,8 +104,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgre test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4265 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4260 -test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_cextensions 4266 -test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_nocextensions 4266 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266 @@ -150,8 +118,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426 -test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_cextensions 6428 
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_nocextensions 6428 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428 @@ -166,8 +132,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 19280 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 28297 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_cextensions 107603 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_nocextensions 116606 + + test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 29138 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 37327 @@ -182,8 +148,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 30054 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 27144 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 30149 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_cextensions 53281 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 
3.3_mysql_mysqlconnector_nocextensions 56284 + + test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 29068 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 31179 @@ -198,8 +164,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_cextensions 18988 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_nocextensions 18988 + + test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988 @@ -214,8 +180,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 
2.7_sqlite_pysqlite_nocextensions 165111 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_cextensions 200102 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_nocextensions 201852 + + test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364 @@ -230,8 +196,8 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2. test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_cextensions 24956 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_nocextensions 24936 + + test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271 @@ -246,8 +212,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_ce test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348 
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601 test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_cextensions 2215 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_nocextensions 2230 + + test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671 @@ -262,8 +228,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 117,18 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_cextensions 122,19 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_nocextensions 122,19 + + test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 122,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19 @@ -278,8 +244,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91 
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_cextensions 78 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_nocextensions 78 + + test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78 @@ -294,8 +260,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31 -test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_cextensions 24 -test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_nocextensions 24 + + test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24 @@ -310,8 +276,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8 -test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_cextensions 9 
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_nocextensions 9 + + test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9 @@ -320,22 +286,22 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_po # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 43 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 45 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 43 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 43 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43 
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_cextensions 43 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_nocextensions 43 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43 @@ -343,22 +309,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 78 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 80 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78 
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 78 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 78 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_cextensions 78 -test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_nocextensions 78 + + test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78 @@ -366,22 +332,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_ # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 15 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 15 + + test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15 
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 16 + + test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_cextensions 16 -test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_nocextensions 16 + + test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16 @@ -389,22 +355,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4 # TEST: 
test.aaa_profiling.test_resultset.ResultSetTest.test_string -test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92959 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107979 + + test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109136 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123136 + + test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_cextensions 79876 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_nocextensions 93876 + + test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462 
@@ -412,22 +378,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_ # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92959 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107979 + + test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109136 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123136 + + test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_cextensions 79876 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_nocextensions 93876 + + test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 
3.4_postgresql_psycopg2_nocextensions 14489 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462 diff --git a/test/requirements.py b/test/requirements.py index 80bd135e9..0a695b641 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -420,6 +420,12 @@ class DefaultRequirements(SuiteRequirements): no_support('oracle', 'FIXME: no support in database?'), no_support('sybase', 'FIXME: guessing, needs confirmation'), no_support('mssql+pymssql', 'no FreeTDS support'), + LambdaPredicate( + lambda config: against(config, "mysql+mysqlconnector") and + config.db.dialect._mysqlconnector_version_info > (2, 0) and + util.py2k, + "bug in mysqlconnector 2.0" + ), LambdaPredicate( lambda config: against(config, 'mssql+pyodbc') and config.db.dialect.freetds and diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 430c3fe7c..0ae1b0b34 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -570,9 +570,6 @@ class QueryTest(fixtures.TestBase): ): eq_(expr.execute().fetchall(), result) - @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") @testing.requires.mod_operator_as_percent_sign @testing.emits_warning('.*now automatically escapes.*') def test_percents_in_text(self): @@ -2510,9 +2507,6 @@ class OperatorTest(fixtures.TestBase): metadata.drop_all() # TODO: seems like more tests warranted for this setup. 
- @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") def test_modulo(self): eq_( select([flds.c.intcol % 3], -- cgit v1.2.1 From e04b693e7c7c216526a7e6be6f4c074f4f400c3c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 13 Oct 2014 12:34:41 -0400 Subject: - this test passes now in more recent mysqlconnector --- test/dialect/mysql/test_types.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py index 75dbe15e0..e65acc6db 100644 --- a/test/dialect/mysql/test_types.py +++ b/test/dialect/mysql/test_types.py @@ -154,10 +154,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): res ) - @testing.fails_if( - lambda: testing.against("mysql+mysqlconnector") - and not util.py3k, - "bug in mysqlconnector; http://bugs.mysql.com/bug.php?id=73266") + # fixed in mysql-connector as of 2.0.1, + # see http://bugs.mysql.com/bug.php?id=73266 @testing.provide_metadata def test_precision_float_roundtrip(self): t = Table('t', self.metadata, -- cgit v1.2.1 From 198917237e6051a02e6fd681de9200a3ab737fa8 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 13 Oct 2014 12:37:29 -0400 Subject: - additional issues fixed in mysqlconnector 2.0.1 --- test/sql/test_query.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 0ae1b0b34..fc040dfed 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -295,9 +295,6 @@ class QueryTest(fixtures.TestBase): l.append(row) self.assert_(len(l) == 3) - @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") @testing.requires.subqueries def test_anonymous_rows(self): users.insert().execute( @@ -509,9 +506,6 @@ class QueryTest(fixtures.TestBase): lambda: row[accessor] ) - @testing.fails_if( - lambda: util.py3k and 
testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") @testing.requires.boolean_col_expressions def test_or_and_as_columns(self): true, false = literal(True), literal(False) @@ -620,9 +614,6 @@ class QueryTest(fixtures.TestBase): c = testing.db.connect() assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7 - @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") def test_repeated_bindparams(self): """Tests that a BindParam can be used more than once. @@ -1316,9 +1307,6 @@ class QueryTest(fixtures.TestBase): # Null values are not outside any set assert len(r) == 0 - @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") @testing.emits_warning('.*empty sequence.*') @testing.fails_on('firebird', "uses sql-92 rules") @testing.fails_on('sybase', "uses sql-92 rules") @@ -1345,9 +1333,6 @@ class QueryTest(fixtures.TestBase): r = s.execute(search_key=None).fetchall() assert len(r) == 0 - @testing.fails_if( - lambda: util.py3k and testing.against('mysql+mysqlconnector'), - "bug in mysqlconnector") @testing.emits_warning('.*empty sequence.*') def test_literal_in(self): """similar to test_bind_in but use a bind with a value.""" -- cgit v1.2.1 From 09e2a15a8052ad6e4f3fe41bc74b1aeeafe613a7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 13 Oct 2014 13:17:24 -0400 Subject: - merge 0.9.8 release date --- doc/build/changelog/changelog_09.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index fd36240f8..8687284e8 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -12,6 +12,7 @@ .. changelog:: :version: 0.9.8 + :released: October 13, 2014 .. 
change:: :tags: bug, mysql, mysqlconnector -- cgit v1.2.1 From fb09ad7551cf348f999647347882546a1f50dcbe Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 14 Oct 2014 11:59:48 -0400 Subject: - The ``__module__`` attribute is now set for all those SQL and ORM functions that are derived as "public factory" symbols, which should assist with documentation tools being able to report on the target module. fixes #3218 --- doc/build/changelog/changelog_10.rst | 9 +++++++++ lib/sqlalchemy/util/langhelpers.py | 4 +++- test/base/test_utils.py | 39 +++++++++++++++++++++++++++++++++++- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 3d471f192..8578c7883 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,15 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, general + :tickets: 3218 + + The ``__module__`` attribute is now set for all those SQL and + ORM functions that are derived as "public factory" symbols, which + should assist with documentation tools being able to report on the + target module. + .. change:: :tags: feature, sql diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index f6da9a87d..5c17bea88 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -134,7 +134,8 @@ def public_factory(target, location): fn = target.__init__ callable_ = target doc = "Construct a new :class:`.%s` object. \n\n"\ - "This constructor is mirrored as a public API function; see :func:`~%s` "\ + "This constructor is mirrored as a public API function; "\ + "see :func:`~%s` "\ "for a full usage and argument description." 
% ( target.__name__, location, ) else: @@ -155,6 +156,7 @@ def %(name)s(%(args)s): exec(code, env) decorated = env[location_name] decorated.__doc__ = fn.__doc__ + decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0] if compat.py2k or hasattr(fn, '__func__'): fn.__func__.__doc__ = doc else: diff --git a/test/base/test_utils.py b/test/base/test_utils.py index a378b0160..f75c5cbe9 100644 --- a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -6,7 +6,7 @@ from sqlalchemy.testing import eq_, is_, ne_, fails_if from sqlalchemy.testing.util import picklers, gc_collect from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec from sqlalchemy.sql import column - +from sqlalchemy.util import langhelpers class _KeyedTupleTest(object): @@ -1274,6 +1274,43 @@ class DuckTypeCollectionTest(fixtures.TestBase): is_(util.duck_type_collection(instance), None) +class PublicFactoryTest(fixtures.TestBase): + + def _fixture(self): + class Thingy(object): + def __init__(self, value): + "make a thingy" + self.value = value + + @classmethod + def foobar(cls, x, y): + "do the foobar" + return Thingy(x + y) + + return Thingy + + def test_classmethod(self): + Thingy = self._fixture() + foob = langhelpers.public_factory( + Thingy.foobar, ".sql.elements.foob") + eq_(foob(3, 4).value, 7) + eq_(foob(x=3, y=4).value, 7) + eq_(foob.__doc__, "do the foobar") + eq_(foob.__module__, "sqlalchemy.sql.elements") + assert Thingy.foobar.__doc__.startswith("This function is mirrored;") + + def test_constructor(self): + Thingy = self._fixture() + foob = langhelpers.public_factory( + Thingy, ".sql.elements.foob") + eq_(foob(7).value, 7) + eq_(foob(value=7).value, 7) + eq_(foob.__doc__, "make a thingy") + eq_(foob.__module__, "sqlalchemy.sql.elements") + assert Thingy.__init__.__doc__.startswith( + "Construct a new :class:`.Thingy` object.") + + class ArgInspectionTest(fixtures.TestBase): def test_get_cls_kwargs(self): -- cgit v1.2.1 From 
1e5ffa029a2b9adc1a6a3c83fb7e88a99d4e6448 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 14 Oct 2014 12:07:38 -0400 Subject: - move BindTest into orm/test_binds --- test/orm/test_bind.py | 199 +++++++++++++++++++++++++++++++++++++++++++++-- test/orm/test_session.py | 188 -------------------------------------------- 2 files changed, 194 insertions(+), 193 deletions(-) diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py index 0d869130b..3e5af0cba 100644 --- a/test/orm/test_bind.py +++ b/test/orm/test_bind.py @@ -1,14 +1,205 @@ -from sqlalchemy.testing import assert_raises, assert_raises_message +from sqlalchemy.testing import assert_raises_message from sqlalchemy import MetaData, Integer from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column from sqlalchemy.orm import mapper, create_session import sqlalchemy as sa from sqlalchemy import testing -from sqlalchemy.testing import fixtures +from sqlalchemy.testing import fixtures, eq_, engines +from sqlalchemy.orm import relationship, Session, backref, sessionmaker +from test.orm import _fixtures -class BindTest(fixtures.MappedTest): +class BindIntegrationTest(_fixtures.FixtureTest): + run_inserts = None + + def test_mapped_binds(self): + Address, addresses, users, User = (self.classes.Address, + self.tables.addresses, + self.tables.users, + self.classes.User) + + # ensure tables are unbound + m2 = sa.MetaData() + users_unbound = users.tometadata(m2) + addresses_unbound = addresses.tometadata(m2) + + mapper(Address, addresses_unbound) + mapper(User, users_unbound, properties={ + 'addresses': relationship(Address, + backref=backref("user", cascade="all"), + cascade="all")}) + + sess = Session(binds={User: self.metadata.bind, + Address: self.metadata.bind}) + + u1 = User(id=1, name='ed') + sess.add(u1) + eq_(sess.query(User).filter(User.id == 1).all(), + [User(id=1, name='ed')]) + + # test expression binding + + sess.execute(users_unbound.insert(), params=dict(id=2, + 
name='jack')) + eq_(sess.execute(users_unbound.select(users_unbound.c.id + == 2)).fetchall(), [(2, 'jack')]) + + eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(), + [(2, 'jack')]) + + sess.execute(users_unbound.delete()) + eq_(sess.execute(users_unbound.select()).fetchall(), []) + + sess.close() + + def test_table_binds(self): + Address, addresses, users, User = (self.classes.Address, + self.tables.addresses, + self.tables.users, + self.classes.User) + + # ensure tables are unbound + m2 = sa.MetaData() + users_unbound = users.tometadata(m2) + addresses_unbound = addresses.tometadata(m2) + + mapper(Address, addresses_unbound) + mapper(User, users_unbound, properties={ + 'addresses': relationship(Address, + backref=backref("user", cascade="all"), + cascade="all")}) + + Session = sessionmaker(binds={users_unbound: self.metadata.bind, + addresses_unbound: self.metadata.bind}) + sess = Session() + + u1 = User(id=1, name='ed') + sess.add(u1) + eq_(sess.query(User).filter(User.id == 1).all(), + [User(id=1, name='ed')]) + + sess.execute(users_unbound.insert(), params=dict(id=2, name='jack')) + + eq_(sess.execute(users_unbound.select(users_unbound.c.id + == 2)).fetchall(), [(2, 'jack')]) + + eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(), + [(2, 'jack')]) + + sess.execute(users_unbound.delete()) + eq_(sess.execute(users_unbound.select()).fetchall(), []) + + sess.close() + + def test_bind_from_metadata(self): + users, User = self.tables.users, self.classes.User + + mapper(User, users) + + session = create_session() + session.execute(users.insert(), dict(name='Johnny')) + + assert len(session.query(User).filter_by(name='Johnny').all()) == 1 + + session.execute(users.delete()) + + assert len(session.query(User).filter_by(name='Johnny').all()) == 0 + session.close() + + def test_bind_arguments(self): + users, Address, addresses, User = (self.tables.users, + self.classes.Address, + self.tables.addresses, + self.classes.User) + + mapper(User, 
users) + mapper(Address, addresses) + + e1 = engines.testing_engine() + e2 = engines.testing_engine() + e3 = engines.testing_engine() + + sess = Session(e3) + sess.bind_mapper(User, e1) + sess.bind_mapper(Address, e2) + + assert sess.connection().engine is e3 + assert sess.connection(bind=e1).engine is e1 + assert sess.connection(mapper=Address, bind=e1).engine is e1 + assert sess.connection(mapper=Address).engine is e2 + assert sess.connection(clause=addresses.select()).engine is e2 + assert sess.connection(mapper=User, + clause=addresses.select()).engine is e1 + assert sess.connection(mapper=User, + clause=addresses.select(), + bind=e2).engine is e2 + + sess.close() + + @engines.close_open_connections + def test_bound_connection(self): + users, User = self.tables.users, self.classes.User + + mapper(User, users) + c = testing.db.connect() + sess = create_session(bind=c) + sess.begin() + transaction = sess.transaction + u = User(name='u1') + sess.add(u) + sess.flush() + assert transaction._connection_for_bind(testing.db) \ + is transaction._connection_for_bind(c) is c + + assert_raises_message(sa.exc.InvalidRequestError, + 'Session already has a Connection ' + 'associated', + transaction._connection_for_bind, + testing.db.connect()) + transaction.rollback() + assert len(sess.query(User).all()) == 0 + sess.close() + + def test_bound_connection_transactional(self): + User, users = self.classes.User, self.tables.users + + mapper(User, users) + c = testing.db.connect() + + sess = create_session(bind=c, autocommit=False) + u = User(name='u1') + sess.add(u) + sess.flush() + sess.close() + assert not c.in_transaction() + assert c.scalar("select count(1) from users") == 0 + + sess = create_session(bind=c, autocommit=False) + u = User(name='u2') + sess.add(u) + sess.flush() + sess.commit() + assert not c.in_transaction() + assert c.scalar("select count(1) from users") == 1 + c.execute("delete from users") + assert c.scalar("select count(1) from users") == 0 + + c = 
testing.db.connect() + + trans = c.begin() + sess = create_session(bind=c, autocommit=True) + u = User(name='u3') + sess.add(u) + sess.flush() + assert c.in_transaction() + trans.commit() + assert not c.in_transaction() + assert c.scalar("select count(1) from users") == 1 + + +class SessionBindTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('test_table', metadata, @@ -58,5 +249,3 @@ class BindTest(fixtures.MappedTest): ('Could not locate a bind configured on Mapper|Foo|test_table ' 'or this Session'), sess.flush) - - diff --git a/test/orm/test_session.py b/test/orm/test_session.py index 74a7a7442..06d1d7334 100644 --- a/test/orm/test_session.py +++ b/test/orm/test_session.py @@ -18,194 +18,6 @@ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy import event, ForeignKey -class BindTest(_fixtures.FixtureTest): - run_inserts = None - - def test_mapped_binds(self): - Address, addresses, users, User = (self.classes.Address, - self.tables.addresses, - self.tables.users, - self.classes.User) - - - # ensure tables are unbound - m2 = sa.MetaData() - users_unbound = users.tometadata(m2) - addresses_unbound = addresses.tometadata(m2) - - mapper(Address, addresses_unbound) - mapper(User, users_unbound, properties={ - 'addresses': relationship(Address, - backref=backref("user", cascade="all"), - cascade="all")}) - - sess = Session(binds={User: self.metadata.bind, - Address: self.metadata.bind}) - - u1 = User(id=1, name='ed') - sess.add(u1) - eq_(sess.query(User).filter(User.id == 1).all(), - [User(id=1, name='ed')]) - - # test expression binding - - sess.execute(users_unbound.insert(), params=dict(id=2, - name='jack')) - eq_(sess.execute(users_unbound.select(users_unbound.c.id - == 2)).fetchall(), [(2, 'jack')]) - - eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(), - [(2, 'jack')]) - - sess.execute(users_unbound.delete()) - eq_(sess.execute(users_unbound.select()).fetchall(), []) - - 
sess.close() - - def test_table_binds(self): - Address, addresses, users, User = (self.classes.Address, - self.tables.addresses, - self.tables.users, - self.classes.User) - - - # ensure tables are unbound - m2 = sa.MetaData() - users_unbound = users.tometadata(m2) - addresses_unbound = addresses.tometadata(m2) - - mapper(Address, addresses_unbound) - mapper(User, users_unbound, properties={ - 'addresses': relationship(Address, - backref=backref("user", cascade="all"), - cascade="all")}) - - Session = sessionmaker(binds={users_unbound: self.metadata.bind, - addresses_unbound: self.metadata.bind}) - sess = Session() - - u1 = User(id=1, name='ed') - sess.add(u1) - eq_(sess.query(User).filter(User.id == 1).all(), - [User(id=1, name='ed')]) - - sess.execute(users_unbound.insert(), params=dict(id=2, name='jack')) - - eq_(sess.execute(users_unbound.select(users_unbound.c.id - == 2)).fetchall(), [(2, 'jack')]) - - eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(), - [(2, 'jack')]) - - sess.execute(users_unbound.delete()) - eq_(sess.execute(users_unbound.select()).fetchall(), []) - - sess.close() - - def test_bind_from_metadata(self): - users, User = self.tables.users, self.classes.User - - mapper(User, users) - - session = create_session() - session.execute(users.insert(), dict(name='Johnny')) - - assert len(session.query(User).filter_by(name='Johnny').all()) == 1 - - session.execute(users.delete()) - - assert len(session.query(User).filter_by(name='Johnny').all()) == 0 - session.close() - - def test_bind_arguments(self): - users, Address, addresses, User = (self.tables.users, - self.classes.Address, - self.tables.addresses, - self.classes.User) - - mapper(User, users) - mapper(Address, addresses) - - e1 = engines.testing_engine() - e2 = engines.testing_engine() - e3 = engines.testing_engine() - - sess = Session(e3) - sess.bind_mapper(User, e1) - sess.bind_mapper(Address, e2) - - assert sess.connection().engine is e3 - assert sess.connection(bind=e1).engine 
is e1 - assert sess.connection(mapper=Address, bind=e1).engine is e1 - assert sess.connection(mapper=Address).engine is e2 - assert sess.connection(clause=addresses.select()).engine is e2 - assert sess.connection(mapper=User, - clause=addresses.select()).engine is e1 - assert sess.connection(mapper=User, - clause=addresses.select(), - bind=e2).engine is e2 - - sess.close() - - @engines.close_open_connections - def test_bound_connection(self): - users, User = self.tables.users, self.classes.User - - mapper(User, users) - c = testing.db.connect() - sess = create_session(bind=c) - sess.begin() - transaction = sess.transaction - u = User(name='u1') - sess.add(u) - sess.flush() - assert transaction._connection_for_bind(testing.db) \ - is transaction._connection_for_bind(c) is c - - assert_raises_message(sa.exc.InvalidRequestError, - 'Session already has a Connection ' - 'associated', - transaction._connection_for_bind, - testing.db.connect()) - transaction.rollback() - assert len(sess.query(User).all()) == 0 - sess.close() - - def test_bound_connection_transactional(self): - User, users = self.classes.User, self.tables.users - - mapper(User, users) - c = testing.db.connect() - - sess = create_session(bind=c, autocommit=False) - u = User(name='u1') - sess.add(u) - sess.flush() - sess.close() - assert not c.in_transaction() - assert c.scalar("select count(1) from users") == 0 - - sess = create_session(bind=c, autocommit=False) - u = User(name='u2') - sess.add(u) - sess.flush() - sess.commit() - assert not c.in_transaction() - assert c.scalar("select count(1) from users") == 1 - c.execute("delete from users") - assert c.scalar("select count(1) from users") == 0 - - c = testing.db.connect() - - trans = c.begin() - sess = create_session(bind=c, autocommit=True) - u = User(name='u3') - sess.add(u) - sess.flush() - assert c.in_transaction() - trans.commit() - assert not c.in_transaction() - assert c.scalar("select count(1) from users") == 1 class 
ExecutionTest(_fixtures.FixtureTest): run_inserts = None -- cgit v1.2.1 From 6de1a878702b8737b2257b89b478ead79a8d78cc Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 14 Oct 2014 14:04:17 -0400 Subject: - Improvements to the mechanism used by :class:`.Session` to locate "binds" (e.g. engines to use), such engines can be associated with mixin classes, concrete subclasses, as well as a wider variety of table metadata such as joined inheritance tables. fixes #3035 --- doc/build/changelog/changelog_10.rst | 13 +++ doc/build/changelog/migration_10.rst | 42 +++++++ lib/sqlalchemy/orm/session.py | 96 ++++++++------- test/orm/test_bind.py | 220 ++++++++++++++++++++++++++++++++++- test/orm/test_session.py | 15 ++- 5 files changed, 336 insertions(+), 50 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 8578c7883..66fa2ad26 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,19 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, orm + :tickets: 3035 + + Improvements to the mechanism used by :class:`.Session` to locate + "binds" (e.g. engines to use), such engines can be associated with + mixin classes, concrete subclasses, as well as a wider variety + of table metadata such as joined inheritance tables. + + .. seealso:: + + :ref:`bug_3035` + .. change:: :tags: bug, general :tickets: 3218 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 951e39603..dd8964f8b 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -468,6 +468,48 @@ object totally smokes both namedtuple and KeyedTuple:: :ticket:`3176` +.. 
_bug_3035: + +Session.get_bind() handles a wider variety of inheritance scenarios +------------------------------------------------------------------- + +The :meth:`.Session.get_bind` method is invoked whenever a query or unit +of work flush process seeks to locate the database engine that corresponds +to a particular class. The method has been improved to handle a variety +of inheritance-oriented scenarios, including: + +* Binding to a Mixin or Abstract Class:: + + class MyClass(SomeMixin, Base): + __tablename__ = 'my_table' + # ... + + session = Session(binds={SomeMixin: some_engine}) + + +* Binding to inherited concrete subclasses individually based on table:: + + class BaseClass(Base): + __tablename__ = 'base' + + # ... + + class ConcreteSubClass(BaseClass): + __tablename__ = 'concrete' + + # ... + + __mapper_args__ = {'concrete': True} + + + session = Session(binds={ + base_table: some_engine, + concrete_table: some_other_engine + }) + + +:ticket:`3035` + .. _feature_3178: New systems to safely emit parameterized warnings diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 13afcb357..db9d3a51d 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -641,14 +641,8 @@ class Session(_SessionClassMethods): SessionExtension._adapt_listener(self, ext) if binds is not None: - for mapperortable, bind in binds.items(): - insp = inspect(mapperortable) - if insp.is_selectable: - self.bind_table(mapperortable, bind) - elif insp.is_mapper: - self.bind_mapper(mapperortable, bind) - else: - assert False + for key, bind in binds.items(): + self._add_bind(key, bind) if not self.autocommit: self.begin() @@ -1026,40 +1020,47 @@ class Session(_SessionClassMethods): # TODO: + crystallize + document resolution order # vis. bind_mapper/bind_table - def bind_mapper(self, mapper, bind): - """Bind operations for a mapper to a Connectable. 
- - mapper - A mapper instance or mapped class + def _add_bind(self, key, bind): + try: + insp = inspect(key) + except sa_exc.NoInspectionAvailable: + if not isinstance(key, type): + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) + else: + self.__binds[key] = bind + else: + if insp.is_selectable: + self.__binds[insp] = bind + elif insp.is_mapper: + self.__binds[insp.class_] = bind + for selectable in insp._all_tables: + self.__binds[selectable] = bind + else: + raise exc.ArgumentError( + "Not acceptable bind target: %s" % + key) - bind - Any Connectable: a :class:`.Engine` or :class:`.Connection`. + def bind_mapper(self, mapper, bind): + """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - All subsequent operations involving this mapper will use the given - `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. """ - if isinstance(mapper, type): - mapper = class_mapper(mapper) - - self.__binds[mapper.base_mapper] = bind - for t in mapper._all_tables: - self.__binds[t] = bind + self._add_bind(mapper, bind) def bind_table(self, table, bind): - """Bind operations on a Table to a Connectable. - - table - A :class:`.Table` instance + """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine` + or :class:`.Connection`. - bind - Any Connectable: a :class:`.Engine` or :class:`.Connection`. - - All subsequent operations involving this :class:`.Table` will use the - given `bind`. + The given mapper is added to a lookup used by the + :meth:`.Session.get_bind` method. """ - self.__binds[table] = bind + self._add_bind(table, bind) def get_bind(self, mapper=None, clause=None): """Return a "bind" to which this :class:`.Session` is bound. @@ -1113,6 +1114,7 @@ class Session(_SessionClassMethods): bound :class:`.MetaData`. 
""" + if mapper is clause is None: if self.bind: return self.bind @@ -1122,15 +1124,23 @@ class Session(_SessionClassMethods): "Connection, and no context was provided to locate " "a binding.") - c_mapper = mapper is not None and _class_to_mapper(mapper) or None + if mapper is not None: + try: + mapper = inspect(mapper) + except sa_exc.NoInspectionAvailable: + if isinstance(mapper, type): + raise exc.UnmappedClassError(mapper) + else: + raise - # manually bound? if self.__binds: - if c_mapper: - if c_mapper.base_mapper in self.__binds: - return self.__binds[c_mapper.base_mapper] - elif c_mapper.mapped_table in self.__binds: - return self.__binds[c_mapper.mapped_table] + if mapper: + for cls in mapper.class_.__mro__: + if cls in self.__binds: + return self.__binds[cls] + if clause is None: + clause = mapper.mapped_table + if clause is not None: for t in sql_util.find_tables(clause, include_crud=True): if t in self.__binds: @@ -1142,12 +1152,12 @@ class Session(_SessionClassMethods): if isinstance(clause, sql.expression.ClauseElement) and clause.bind: return clause.bind - if c_mapper and c_mapper.mapped_table.bind: - return c_mapper.mapped_table.bind + if mapper and mapper.mapped_table.bind: + return mapper.mapped_table.bind context = [] if mapper is not None: - context.append('mapper %s' % c_mapper) + context.append('mapper %s' % mapper) if clause is not None: context.append('SQL expression') diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py index 3e5af0cba..33cd66ebc 100644 --- a/test/orm/test_bind.py +++ b/test/orm/test_bind.py @@ -1,13 +1,14 @@ from sqlalchemy.testing import assert_raises_message -from sqlalchemy import MetaData, Integer +from sqlalchemy import MetaData, Integer, ForeignKey from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column from sqlalchemy.orm import mapper, create_session import sqlalchemy as sa from sqlalchemy import testing -from sqlalchemy.testing import fixtures, eq_, engines +from 
sqlalchemy.testing import fixtures, eq_, engines, is_ from sqlalchemy.orm import relationship, Session, backref, sessionmaker from test.orm import _fixtures +from sqlalchemy.testing.mock import Mock class BindIntegrationTest(_fixtures.FixtureTest): @@ -249,3 +250,218 @@ class SessionBindTest(fixtures.MappedTest): ('Could not locate a bind configured on Mapper|Foo|test_table ' 'or this Session'), sess.flush) + + +class GetBindTest(fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table( + 'base_table', metadata, + Column('id', Integer, primary_key=True) + ) + Table( + 'w_mixin_table', metadata, + Column('id', Integer, primary_key=True) + ) + Table( + 'joined_sub_table', metadata, + Column('id', ForeignKey('base_table.id'), primary_key=True) + ) + Table( + 'concrete_sub_table', metadata, + Column('id', Integer, primary_key=True) + ) + + @classmethod + def setup_classes(cls): + class MixinOne(cls.Basic): + pass + + class BaseClass(cls.Basic): + pass + + class ClassWMixin(MixinOne, cls.Basic): + pass + + class JoinedSubClass(BaseClass): + pass + + class ConcreteSubClass(BaseClass): + pass + + @classmethod + def setup_mappers(cls): + mapper(cls.classes.ClassWMixin, cls.tables.w_mixin_table) + mapper(cls.classes.BaseClass, cls.tables.base_table) + mapper( + cls.classes.JoinedSubClass, + cls.tables.joined_sub_table, inherits=cls.classes.BaseClass) + mapper( + cls.classes.ConcreteSubClass, + cls.tables.concrete_sub_table, inherits=cls.classes.BaseClass, + concrete=True) + + def _fixture(self, binds): + return Session(binds=binds) + + def test_fallback_table_metadata(self): + session = self._fixture({}) + is_( + session.get_bind(self.classes.BaseClass), + testing.db + ) + + def test_bind_base_table_base_class(self): + base_class_bind = Mock() + session = self._fixture({ + self.tables.base_table: base_class_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + + def test_bind_base_table_joined_sub_class(self): + 
base_class_bind = Mock() + session = self._fixture({ + self.tables.base_table: base_class_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + is_( + session.get_bind(self.classes.JoinedSubClass), + base_class_bind + ) + + def test_bind_joined_sub_table_joined_sub_class(self): + base_class_bind = Mock(name='base') + joined_class_bind = Mock(name='joined') + session = self._fixture({ + self.tables.base_table: base_class_bind, + self.tables.joined_sub_table: joined_class_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + # joined table inheritance has to query based on the base + # table, so this is what we expect + is_( + session.get_bind(self.classes.JoinedSubClass), + base_class_bind + ) + + def test_bind_base_table_concrete_sub_class(self): + base_class_bind = Mock() + session = self._fixture({ + self.tables.base_table: base_class_bind + }) + + is_( + session.get_bind(self.classes.ConcreteSubClass), + testing.db + ) + + def test_bind_sub_table_concrete_sub_class(self): + base_class_bind = Mock(name='base') + concrete_sub_bind = Mock(name='concrete') + + session = self._fixture({ + self.tables.base_table: base_class_bind, + self.tables.concrete_sub_table: concrete_sub_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + is_( + session.get_bind(self.classes.ConcreteSubClass), + concrete_sub_bind + ) + + def test_bind_base_class_base_class(self): + base_class_bind = Mock() + session = self._fixture({ + self.classes.BaseClass: base_class_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + + def test_bind_mixin_class_simple_class(self): + base_class_bind = Mock() + session = self._fixture({ + self.classes.MixinOne: base_class_bind + }) + + is_( + session.get_bind(self.classes.ClassWMixin), + base_class_bind + ) + + def test_bind_base_class_joined_sub_class(self): + base_class_bind = Mock() + session = self._fixture({ + 
self.classes.BaseClass: base_class_bind + }) + + is_( + session.get_bind(self.classes.JoinedSubClass), + base_class_bind + ) + + def test_bind_joined_sub_class_joined_sub_class(self): + base_class_bind = Mock(name='base') + joined_class_bind = Mock(name='joined') + session = self._fixture({ + self.classes.BaseClass: base_class_bind, + self.classes.JoinedSubClass: joined_class_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + is_( + session.get_bind(self.classes.JoinedSubClass), + joined_class_bind + ) + + def test_bind_base_class_concrete_sub_class(self): + base_class_bind = Mock() + session = self._fixture({ + self.classes.BaseClass: base_class_bind + }) + + is_( + session.get_bind(self.classes.ConcreteSubClass), + base_class_bind + ) + + def test_bind_sub_class_concrete_sub_class(self): + base_class_bind = Mock(name='base') + concrete_sub_bind = Mock(name='concrete') + + session = self._fixture({ + self.classes.BaseClass: base_class_bind, + self.classes.ConcreteSubClass: concrete_sub_bind + }) + + is_( + session.get_bind(self.classes.BaseClass), + base_class_bind + ) + is_( + session.get_bind(self.classes.ConcreteSubClass), + concrete_sub_bind + ) + + diff --git a/test/orm/test_session.py b/test/orm/test_session.py index 06d1d7334..b0b00d5ed 100644 --- a/test/orm/test_session.py +++ b/test/orm/test_session.py @@ -1403,14 +1403,19 @@ class SessionInterface(fixtures.TestBase): eq_(watchdog, instance_methods, watchdog.symmetric_difference(instance_methods)) - def _test_class_guards(self, user_arg): + def _test_class_guards(self, user_arg, is_class=True): watchdog = set() def raises_(method, *args, **kw): watchdog.add(method) callable_ = getattr(create_session(), method) - assert_raises(sa.orm.exc.UnmappedClassError, - callable_, *args, **kw) + if is_class: + assert_raises( + sa.orm.exc.UnmappedClassError, + callable_, *args, **kw) + else: + assert_raises( + sa.exc.NoInspectionAvailable, callable_, *args, **kw) 
raises_('connection', mapper=user_arg) @@ -1433,7 +1438,7 @@ class SessionInterface(fixtures.TestBase): def test_unmapped_primitives(self): for prim in ('doh', 123, ('t', 'u', 'p', 'l', 'e')): self._test_instance_guards(prim) - self._test_class_guards(prim) + self._test_class_guards(prim, is_class=False) def test_unmapped_class_for_instance(self): class Unmapped(object): @@ -1457,7 +1462,7 @@ class SessionInterface(fixtures.TestBase): self._map_it(Mapped) self._test_instance_guards(early) - self._test_class_guards(early) + self._test_class_guards(early, is_class=False) class TLTransactionTest(fixtures.MappedTest): -- cgit v1.2.1 From d06e9c361fe0abd7bfdc7ecaceda931d981344db Mon Sep 17 00:00:00 2001 From: mozillazg Date: Wed, 15 Oct 2014 21:40:19 +0800 Subject: Fixed typo --- lib/sqlalchemy/sql/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index f9c65f6f7..ef5d79a48 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1061,8 +1061,8 @@ class Column(SchemaItem, ColumnClause): conditionally rendered differently on different backends, consider custom compilation rules for :class:`.CreateColumn`. - ..versionadded:: 0.8.3 Added the ``system=True`` parameter to - :class:`.Column`. + .. versionadded:: 0.8.3 Added the ``system=True`` parameter to + :class:`.Column`. 
""" -- cgit v1.2.1 From a02664869c0991fb8de6d6ddd0f189c5987e9782 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 16 Oct 2014 13:30:39 -0400 Subject: - fix imports - pep8 --- test/orm/test_update_delete.py | 45 ++++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py index 35d527ca8..a737a2e1d 100644 --- a/test/orm/test_update_delete.py +++ b/test/orm/test_update_delete.py @@ -1,9 +1,9 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message from sqlalchemy.testing import fixtures -from sqlalchemy import Integer, String, ForeignKey, or_, and_, exc, \ +from sqlalchemy import Integer, String, ForeignKey, or_, exc, \ select, func, Boolean, case, text from sqlalchemy.orm import mapper, relationship, backref, Session, \ - joinedload, aliased + joinedload from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column @@ -70,12 +70,14 @@ class UpdateDeleteTest(fixtures.MappedTest): ): assert_raises_message( exc.InvalidRequestError, - r"Can't call Query.update\(\) when %s\(\) has been called" % mname, + r"Can't call Query.update\(\) when " + "%s\(\) has been called" % mname, q.update, {'name': 'ed'}) assert_raises_message( exc.InvalidRequestError, - r"Can't call Query.delete\(\) when %s\(\) has been called" % mname, + r"Can't call Query.delete\(\) when " + "%s\(\) has been called" % mname, q.delete) def test_delete(self): @@ -116,7 +118,8 @@ class UpdateDeleteTest(fixtures.MappedTest): sess = Session() john, jack, jill, jane = sess.query(User).order_by(User.id).all() - sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\ + sess.query(User).filter( + or_(User.name == 'john', User.name == 'jill')).\ delete(synchronize_session='evaluate') assert john not in sess and jill not in sess sess.rollback() @@ -127,7 +130,8 @@ class UpdateDeleteTest(fixtures.MappedTest): sess = Session() john, jack, jill, jane = 
sess.query(User).order_by(User.id).all() - sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\ + sess.query(User).filter( + or_(User.name == 'john', User.name == 'jill')).\ delete(synchronize_session='fetch') assert john not in sess and jill not in sess sess.rollback() @@ -139,7 +143,8 @@ class UpdateDeleteTest(fixtures.MappedTest): sess = Session() john, jack, jill, jane = sess.query(User).order_by(User.id).all() - sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\ + sess.query(User).filter( + or_(User.name == 'john', User.name == 'jill')).\ delete(synchronize_session=False) assert john in sess and jill in sess @@ -152,7 +157,8 @@ class UpdateDeleteTest(fixtures.MappedTest): sess = Session() john, jack, jill, jane = sess.query(User).order_by(User.id).all() - sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\ + sess.query(User).filter( + or_(User.name == 'john', User.name == 'jill')).\ delete(synchronize_session='fetch') assert john not in sess and jill not in sess @@ -393,7 +399,7 @@ class UpdateDeleteTest(fixtures.MappedTest): sess.query(User).filter_by(name='j2').\ delete( - synchronize_session='evaluate') + synchronize_session='evaluate') assert john not in sess def test_autoflush_before_fetch_delete(self): @@ -405,7 +411,7 @@ class UpdateDeleteTest(fixtures.MappedTest): sess.query(User).filter_by(name='j2').\ delete( - synchronize_session='fetch') + synchronize_session='fetch') assert john not in sess def test_evaluate_before_update(self): @@ -447,7 +453,7 @@ class UpdateDeleteTest(fixtures.MappedTest): sess.query(User).filter_by(name='john').\ filter_by(age=25).\ delete( - synchronize_session='evaluate') + synchronize_session='evaluate') assert john not in sess def test_fetch_before_delete(self): @@ -460,7 +466,7 @@ class UpdateDeleteTest(fixtures.MappedTest): sess.query(User).filter_by(name='john').\ filter_by(age=25).\ delete( - synchronize_session='fetch') + synchronize_session='fetch') 
assert john not in sess @@ -540,7 +546,8 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest): sess = Session() john, jack, jill, jane = sess.query(User).order_by(User.id).all() - sess.query(User).options(joinedload(User.documents)).filter(User.age > 29).\ + sess.query(User).options( + joinedload(User.documents)).filter(User.age > 29).\ update({'age': User.age - 10}, synchronize_session='fetch') eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27]) @@ -632,8 +639,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest): set([ (1, True), (2, None), (3, None), (4, True), - (5, True), (6, None), - ]) + (5, True), (6, None)]) ) def test_no_eval_against_multi_table_criteria(self): @@ -666,8 +672,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest): set([ (1, True), (2, None), (3, None), (4, True), - (5, True), (6, None), - ]) + (5, True), (6, None)]) ) @testing.requires.update_where_target_in_subquery @@ -690,8 +695,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest): set([ (1, True), (2, False), (3, False), (4, True), - (5, True), (6, False), - ]) + (5, True), (6, False)]) ) @testing.only_on('mysql', 'Multi table update') @@ -706,8 +710,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest): filter(User.id == 2).update({ Document.samename: 'd_samename', User.samename: 'u_samename' - }, synchronize_session=False - ) + }, synchronize_session=False) eq_( s.query(User.id, Document.samename, User.samename). filter(User.id == Document.user_id). -- cgit v1.2.1 From 61a4a89d993eda1d3168b501ba9ed8d94ea9b5f8 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 16 Oct 2014 14:36:56 -0400 Subject: - The :meth:`.Query.update` method will now convert string key names in the given dictionary of values into mapped attribute names against the mapped class being updated. Previously, string names were taken in directly and passed to the core update statement without any means to resolve against the mapped entity. 
Support for synonyms and hybrid attributes as the subject attributes of :meth:`.Query.update` are also supported. fixes #3228 --- doc/build/changelog/changelog_10.rst | 16 ++++ doc/build/changelog/migration_10.rst | 57 ++++++++++++++ lib/sqlalchemy/orm/persistence.py | 43 +++++++++-- lib/sqlalchemy/orm/query.py | 26 +++++-- test/orm/test_update_delete.py | 140 ++++++++++++++++++++++++++++++++--- 5 files changed, 258 insertions(+), 24 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 66fa2ad26..ec812a091 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,22 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, orm + :tickets: 3228 + + The :meth:`.Query.update` method will now convert string key + names in the given dictionary of values into mapped attribute names + against the mapped class being updated. Previously, string names + were taken in directly and passed to the core update statement without + any means to resolve against the mapped entity. Support for synonyms + and hybrid attributes as the subject attributes of + :meth:`.Query.update` are also supported. + + .. seealso:: + + :ref:`bug_3228` + .. change:: :tags: bug, orm :tickets: 3035 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index dd8964f8b..3591ee0e2 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -510,6 +510,7 @@ of inheritance-oriented scenarios, including: :ticket:`3035` + .. _feature_3178: New systems to safely emit parameterized warnings @@ -793,6 +794,62 @@ would again fail; these have also been fixed. Behavioral Changes - ORM ======================== +.. 
_bug_3228: + +query.update() now resolves string names into mapped attribute names +-------------------------------------------------------------------- + +The documentation for :meth:`.Query.update` states that the given +``values`` dictionary is "a dictionary with attributes names as keys", +implying that these are mapped attribute names. Unfortunately, the function +was designed more in mind to receive attributes and SQL expressions and +not as much strings; when strings +were passed, these strings would be passed through straight to the core +update statement without any resolution as far as how these names are +represented on the mapped class, meaning the name would have to match that +of a table column exactly, not how an attribute of that name was mapped +onto the class. + +The string names are now resolved as attribute names in earnest:: + + class User(Base): + __tablename__ = 'user' + + id = Column(Integer, primary_key=True) + name = Column('user_name', String(50)) + +Above, the column ``user_name`` is mapped as ``name``. Previously, +a call to :meth:`.Query.update` that was passed strings would have to +have been called as follows:: + + session.query(User).update({'user_name': 'moonbeam'}) + +The given string is now resolved against the entity:: + + session.query(User).update({'name': 'moonbeam'}) + +It is typically preferable to use the attribute directly, to avoid any +ambiguity:: + + session.query(User).update({User.name: 'moonbeam'}) + +The change also indicates that synonyms and hybrid attributes can be referred +to by string name as well:: + + class User(Base): + __tablename__ = 'user' + + id = Column(Integer, primary_key=True) + name = Column('user_name', String(50)) + + @hybrid_property + def fullname(self): + return self.name + + session.query(User).update({'fullname': 'moonbeam'}) + +:ticket:`3228` + .. 
_migration_3061: Changes to attribute events and other operations regarding attributes that have no pre-existing value diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 74e69e44c..114b79ea5 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -18,7 +18,7 @@ import operator from itertools import groupby from .. import sql, util, exc as sa_exc, schema from . import attributes, sync, exc as orm_exc, evaluator -from .base import state_str, _attr_as_key +from .base import state_str, _attr_as_key, _entity_descriptor from ..sql import expression from . import loading @@ -987,6 +987,7 @@ class BulkUpdate(BulkUD): super(BulkUpdate, self).__init__(query) self.query._no_select_modifiers("update") self.values = values + self.mapper = self.query._mapper_zero_or_none() @classmethod def factory(cls, query, synchronize_session, values): @@ -996,9 +997,40 @@ class BulkUpdate(BulkUD): False: BulkUpdate }, synchronize_session, query, values) + def _resolve_string_to_expr(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.__clause_element__() + else: + return key + + def _resolve_key_to_attrname(self, key): + if self.mapper and isinstance(key, util.string_types): + attr = _entity_descriptor(self.mapper, key) + return attr.property.key + elif isinstance(key, attributes.InstrumentedAttribute): + return key.key + elif hasattr(key, '__clause_element__'): + key = key.__clause_element__() + + if self.mapper and isinstance(key, expression.ColumnElement): + try: + attr = self.mapper._columntoproperty[key] + except orm_exc.UnmappedColumnError: + return None + else: + return attr.key + else: + raise sa_exc.InvalidRequestError( + "Invalid expression type: %r" % key) + def _do_exec(self): + values = dict( + (self._resolve_string_to_expr(k), v) + for k, v in self.values.items() + ) update_stmt = sql.update(self.primary_table, - 
self.context.whereclause, self.values) + self.context.whereclause, values) self.result = self.query.session.execute( update_stmt, params=self.query._params) @@ -1044,9 +1076,10 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): def _additional_evaluators(self, evaluator_compiler): self.value_evaluators = {} for key, value in self.values.items(): - key = _attr_as_key(key) - self.value_evaluators[key] = evaluator_compiler.process( - expression._literal_as_binds(value)) + key = self._resolve_key_to_attrname(key) + if key is not None: + self.value_evaluators[key] = evaluator_compiler.process( + expression._literal_as_binds(value)) def _do_post_synchronize(self): session = self.query.session diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 7b2ea7977..fce7a3665 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -2756,9 +2756,25 @@ class Query(object): Updates rows matched by this query in the database. - :param values: a dictionary with attributes names as keys and literal + E.g.:: + + sess.query(User).filter(User.age == 25).\ + update({User.age: User.age - 10}, synchronize_session='fetch') + + + sess.query(User).filter(User.age == 25).\ + update({"age": User.age - 10}, synchronize_session='evaluate') + + + :param values: a dictionary with attributes names, or alternatively + mapped attributes or SQL expressions, as keys, and literal values or sql expressions as values. + .. versionchanged:: 1.0.0 - string names in the values dictionary + are now resolved against the mapped entity; previously, these + strings were passed as literal column names with no mapper-level + translation. + :param synchronize_session: chooses the strategy to update the attributes on objects in the session. Valid values are: @@ -2796,7 +2812,7 @@ class Query(object): which normally occurs upon :meth:`.Session.commit` or can be forced by using :meth:`.Session.expire_all`. 
- * As of 0.8, this method will support multiple table updates, as + * The method supports multiple table updates, as detailed in :ref:`multi_table_updates`, and this behavior does extend to support updates of joined-inheritance and other multiple table mappings. However, the **join condition of an inheritance @@ -2827,12 +2843,6 @@ class Query(object): """ - # TODO: value keys need to be mapped to corresponding sql cols and - # instr.attr.s to string keys - # TODO: updates of manytoone relationships need to be converted to - # fk assignments - # TODO: cascades need handling. - update_op = persistence.BulkUpdate.factory( self, synchronize_session, values) update_op.exec_() diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py index a737a2e1d..a3ad37e60 100644 --- a/test/orm/test_update_delete.py +++ b/test/orm/test_update_delete.py @@ -1,9 +1,9 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message from sqlalchemy.testing import fixtures from sqlalchemy import Integer, String, ForeignKey, or_, exc, \ - select, func, Boolean, case, text + select, func, Boolean, case, text, column from sqlalchemy.orm import mapper, relationship, backref, Session, \ - joinedload + joinedload, synonym from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column @@ -18,7 +18,7 @@ class UpdateDeleteTest(fixtures.MappedTest): Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(32)), - Column('age', Integer)) + Column('age_int', Integer)) @classmethod def setup_classes(cls): @@ -30,10 +30,10 @@ class UpdateDeleteTest(fixtures.MappedTest): users = cls.tables.users users.insert().execute([ - dict(id=1, name='john', age=25), - dict(id=2, name='jack', age=47), - dict(id=3, name='jill', age=29), - dict(id=4, name='jane', age=37), + dict(id=1, name='john', age_int=25), + dict(id=2, name='jack', age_int=47), + dict(id=3, name='jill', age_int=29), + dict(id=4, name='jane', age_int=37), ]) 
@classmethod @@ -41,7 +41,9 @@ class UpdateDeleteTest(fixtures.MappedTest): User = cls.classes.User users = cls.tables.users - mapper(User, users) + mapper(User, users, properties={ + 'age': users.c.age_int + }) def test_illegal_eval(self): User = self.classes.User @@ -80,6 +82,108 @@ class UpdateDeleteTest(fixtures.MappedTest): "%s\(\) has been called" % mname, q.delete) + def test_evaluate_clauseelement(self): + User = self.classes.User + + class Thing(object): + def __clause_element__(self): + return User.name.__clause_element__() + + s = Session() + jill = s.query(User).get(3) + s.query(User).update( + {Thing(): 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.name, 'moonbeam') + + def test_evaluate_invalid(self): + User = self.classes.User + + class Thing(object): + def __clause_element__(self): + return 5 + + s = Session() + + assert_raises_message( + exc.InvalidRequestError, + "Invalid expression type: 5", + s.query(User).update, {Thing(): 'moonbeam'}, + synchronize_session='evaluate' + ) + + def test_evaluate_unmapped_col(self): + User = self.classes.User + + s = Session() + jill = s.query(User).get(3) + s.query(User).update( + {column('name'): 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.name, 'jill') + s.expire(jill) + eq_(jill.name, 'moonbeam') + + def test_evaluate_synonym_string(self): + class Foo(object): + pass + mapper(Foo, self.tables.users, properties={ + 'uname': synonym("name", ) + }) + + s = Session() + jill = s.query(Foo).get(3) + s.query(Foo).update( + {'uname': 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.uname, 'moonbeam') + + def test_evaluate_synonym_attr(self): + class Foo(object): + pass + mapper(Foo, self.tables.users, properties={ + 'uname': synonym("name", ) + }) + + s = Session() + jill = s.query(Foo).get(3) + s.query(Foo).update( + {Foo.uname: 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.uname, 'moonbeam') + + def test_evaluate_double_synonym_attr(self): + class Foo(object): + pass + 
mapper(Foo, self.tables.users, properties={ + 'uname': synonym("name"), + 'ufoo': synonym('uname') + }) + + s = Session() + jill = s.query(Foo).get(3) + s.query(Foo).update( + {Foo.ufoo: 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.ufoo, 'moonbeam') + + def test_evaluate_hybrid_attr(self): + from sqlalchemy.ext.hybrid import hybrid_property + + class Foo(object): + @hybrid_property + def uname(self): + return self.name + + mapper(Foo, self.tables.users) + + s = Session() + jill = s.query(Foo).get(3) + s.query(Foo).update( + {Foo.uname: 'moonbeam'}, + synchronize_session='evaluate') + eq_(jill.uname, 'moonbeam') + def test_delete(self): User = self.classes.User @@ -208,7 +312,8 @@ class UpdateDeleteTest(fixtures.MappedTest): sess.query(User).filter(User.age > 27).\ update( - {users.c.age: User.age - 10}, synchronize_session='evaluate') + {users.c.age_int: User.age - 10}, + synchronize_session='evaluate') eq_([john.age, jack.age, jill.age, jane.age], [25, 27, 19, 27]) eq_(sess.query(User.age).order_by( User.id).all(), list(zip([25, 27, 19, 27]))) @@ -219,12 +324,25 @@ class UpdateDeleteTest(fixtures.MappedTest): eq_(sess.query(User.age).order_by( User.id).all(), list(zip([15, 27, 19, 27]))) + def test_update_against_table_col(self): + User, users = self.classes.User, self.tables.users + + sess = Session() + john, jack, jill, jane = sess.query(User).order_by(User.id).all() + eq_([john.age, jack.age, jill.age, jane.age], [25, 47, 29, 37]) + sess.query(User).filter(User.age > 27).\ + update( + {users.c.age_int: User.age - 10}, + synchronize_session='evaluate') + eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 19, 27]) + def test_update_against_metadata(self): User, users = self.classes.User, self.tables.users sess = Session() - sess.query(users).update({users.c.age: 29}, synchronize_session=False) + sess.query(users).update( + {users.c.age_int: 29}, synchronize_session=False) eq_(sess.query(User.age).order_by( User.id).all(), list(zip([29, 29, 29, 
29]))) @@ -235,7 +353,7 @@ class UpdateDeleteTest(fixtures.MappedTest): john, jack, jill, jane = sess.query(User).order_by(User.id).all() - sess.query(User).filter(text('age > :x')).params(x=29).\ + sess.query(User).filter(text('age_int > :x')).params(x=29).\ update({'age': User.age - 10}, synchronize_session='fetch') eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27]) -- cgit v1.2.1 From 2924f8685c1d9f25820aa154691afe2d2de645f4 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 17 Oct 2014 19:29:32 -0400 Subject: pep8 --- test/base/test_except.py | 92 ++++++++++++++++++++++++++++-------------------- 1 file changed, 54 insertions(+), 38 deletions(-) diff --git a/test/base/test_except.py b/test/base/test_except.py index 359473c54..a438e26d9 100644 --- a/test/base/test_except.py +++ b/test/base/test_except.py @@ -19,6 +19,7 @@ class OperationalError(DatabaseError): class ProgrammingError(DatabaseError): + def __str__(self): return '<%s>' % self.bogus @@ -31,89 +32,102 @@ class WrapTest(fixtures.TestBase): def test_db_error_normal(self): try: - raise sa_exceptions.DBAPIError.instance('', [], - OperationalError(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + '', [], + OperationalError(), DatabaseError) except sa_exceptions.DBAPIError: self.assert_(True) def test_tostring(self): try: - raise sa_exceptions.DBAPIError.instance('this is a message' - , None, OperationalError(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + 'this is a message', + None, OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: assert str(exc) \ == "(OperationalError) 'this is a message' None" def test_tostring_large_dict(self): try: - raise sa_exceptions.DBAPIError.instance('this is a message' - , - {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h': - 8, 'i': 9, 'j': 10, 'k': 11, - }, OperationalError(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + 'this is a message', + { + 'a': 1, 'b': 2, 'c': 3, 'd': 4, 
'e': 5, 'f': 6, 'g': 7, + 'h': 8, 'i': 9, 'j': 10, 'k': 11 + }, + OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: - assert str(exc).startswith("(OperationalError) 'this is a " - "message' {") + assert str(exc).startswith( + "(OperationalError) 'this is a message' {") def test_tostring_large_list(self): try: - raise sa_exceptions.DBAPIError.instance('this is a message', - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,], + raise sa_exceptions.DBAPIError.instance( + 'this is a message', + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: - assert str(exc).startswith("(OperationalError) 'this is a " - "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]") + assert str(exc).startswith( + "(OperationalError) 'this is a " + "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]") def test_tostring_large_executemany(self): try: - raise sa_exceptions.DBAPIError.instance('this is a message', + raise sa_exceptions.DBAPIError.instance( + 'this is a message', [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, - {1: 1}, {1:1}, {1: 1}, {1: 1},], + {1: 1}, {1: 1}, {1: 1}, {1: 1}, ], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: - eq_(str(exc) , - "(OperationalError) 'this is a message' [{1: 1}, "\ - "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\ - "1}, {1: 1}, {1: 1}]") + eq_( + str(exc), + "(OperationalError) 'this is a message' [{1: 1}, " + "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: " + "1}, {1: 1}, {1: 1}]" + ) try: raise sa_exceptions.DBAPIError.instance('this is a message', [ {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, - {1:1}, {1: 1}, {1: 1}, {1: 1}, - ], OperationalError(), DatabaseError) + {1: 1}, {1: 1}, {1: 1}, {1: 1}, + ], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: - eq_(str(exc) , + eq_(str(exc), "(OperationalError) 'this is a message' [{1: 1}, " "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, " "{1: 1}, {1: 1} ... 
displaying 10 of 11 total " "bound parameter sets ... {1: 1}, {1: 1}]" - ) + ) try: - raise sa_exceptions.DBAPIError.instance('this is a message', + raise sa_exceptions.DBAPIError.instance( + 'this is a message', [ - (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), - (1, ), + (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), + (1, ), (1, ), (1, ), (1, ), ], OperationalError(), DatabaseError) + except sa_exceptions.DBAPIError as exc: - eq_(str(exc), - "(OperationalError) 'this is a message' [(1,), "\ + eq_( + str(exc), + "(OperationalError) 'this is a message' [(1,), " "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]") try: raise sa_exceptions.DBAPIError.instance('this is a message', [ (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), - ], OperationalError(), DatabaseError) + ], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: eq_(str(exc), "(OperationalError) 'this is a message' [(1,), " "(1,), (1,), (1,), (1,), (1,), (1,), (1,) " "... displaying 10 of 11 total bound " "parameter sets ... 
(1,), (1,)]" - ) + ) def test_db_error_busted_dbapi(self): try: - raise sa_exceptions.DBAPIError.instance('', [], - ProgrammingError(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + '', [], + ProgrammingError(), DatabaseError) except sa_exceptions.DBAPIError as e: self.assert_(True) self.assert_('Error in str() of DB-API' in e.args[0]) @@ -140,8 +154,9 @@ class WrapTest(fixtures.TestBase): def test_db_error_keyboard_interrupt(self): try: - raise sa_exceptions.DBAPIError.instance('', [], - KeyboardInterrupt(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + '', [], + KeyboardInterrupt(), DatabaseError) except sa_exceptions.DBAPIError: self.assert_(False) except KeyboardInterrupt: @@ -149,8 +164,9 @@ class WrapTest(fixtures.TestBase): def test_db_error_system_exit(self): try: - raise sa_exceptions.DBAPIError.instance('', [], - SystemExit(), DatabaseError) + raise sa_exceptions.DBAPIError.instance( + '', [], + SystemExit(), DatabaseError) except sa_exceptions.DBAPIError: self.assert_(False) except SystemExit: -- cgit v1.2.1 From 6f40eb37cbdcdae032d73c3537df1e01d2e9e67c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 17 Oct 2014 19:37:45 -0400 Subject: - Exception messages have been spiffed up a bit. The SQL statement and parameters are not displayed if None, reducing confusion for error messages that weren't related to a statement. The full module and classname for the DBAPI-level exception is displayed, making it clear that this is a wrapped DBAPI exception. The statement and parameters themselves are bounded within a bracketed sections to better isolate them from the error message and from each other. 
fixes #3172 --- doc/build/changelog/changelog_10.rst | 13 +++++++++++++ lib/sqlalchemy/exc.py | 21 ++++++++++++--------- test/base/test_except.py | 34 +++++++++++++++++++++------------- test/engine/test_execute.py | 6 +++--- test/engine/test_logging.py | 8 +++++--- test/sql/test_query.py | 7 +++---- 6 files changed, 57 insertions(+), 32 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index ec812a091..5aed3bddd 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,19 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, sql + :tickets: 3172 + + Exception messages have been spiffed up a bit. The SQL statement + and parameters are not displayed if None, reducing confusion for + error messages that weren't related to a statement. The full + module and classname for the DBAPI-level exception is displayed, + making it clear that this is a wrapped DBAPI exception. The + statement and parameters themselves are bounded within a bracketed + sections to better isolate them from the error message and from + each other. + .. 
change:: :tags: bug, orm :tickets: 3228 diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 5d35dc2e7..3271d09d4 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -238,14 +238,16 @@ class StatementError(SQLAlchemyError): def __str__(self): from sqlalchemy.sql import util - params_repr = util._repr_params(self.params, 10) + details = [SQLAlchemyError.__str__(self)] + if self.statement: + details.append("[SQL: %r]" % self.statement) + if self.params: + params_repr = util._repr_params(self.params, 10) + details.append("[parameters: %r]" % params_repr) return ' '.join([ "(%s)" % det for det in self.detail - ] + [ - SQLAlchemyError.__str__(self), - repr(self.statement), repr(params_repr) - ]) + ] + details) def __unicode__(self): return self.__str__() @@ -289,10 +291,10 @@ class DBAPIError(StatementError): # not a DBAPI error, statement is present. # raise a StatementError if not isinstance(orig, dbapi_base_err) and statement: - msg = traceback.format_exception_only( - orig.__class__, orig)[-1].strip() return StatementError( - "%s (original cause: %s)" % (str(orig), msg), + "(%s.%s) %s" % + (orig.__class__.__module__, orig.__class__.__name__, + orig), statement, params, orig ) @@ -316,7 +318,8 @@ class DBAPIError(StatementError): text = 'Error in str() of DB-API-generated exception: ' + str(e) StatementError.__init__( self, - '(%s) %s' % (orig.__class__.__name__, text), + '(%s.%s) %s' % ( + orig.__class__.__module__, orig.__class__.__name__, text, ), statement, params, orig diff --git a/test/base/test_except.py b/test/base/test_except.py index a438e26d9..918e7a042 100644 --- a/test/base/test_except.py +++ b/test/base/test_except.py @@ -44,8 +44,10 @@ class WrapTest(fixtures.TestBase): 'this is a message', None, OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: - assert str(exc) \ - == "(OperationalError) 'this is a message' None" + eq_( + str(exc), + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a 
message']") def test_tostring_large_dict(self): try: @@ -58,7 +60,8 @@ class WrapTest(fixtures.TestBase): OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: assert str(exc).startswith( - "(OperationalError) 'this is a message' {") + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: {") def test_tostring_large_list(self): try: @@ -68,8 +71,9 @@ class WrapTest(fixtures.TestBase): OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: assert str(exc).startswith( - "(OperationalError) 'this is a " - "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]") + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: " + "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]]") def test_tostring_large_executemany(self): try: @@ -81,9 +85,10 @@ class WrapTest(fixtures.TestBase): except sa_exceptions.DBAPIError as exc: eq_( str(exc), - "(OperationalError) 'this is a message' [{1: 1}, " + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: [{1: 1}, " "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: " - "1}, {1: 1}, {1: 1}]" + "1}, {1: 1}, {1: 1}]]" ) try: raise sa_exceptions.DBAPIError.instance('this is a message', [ @@ -92,10 +97,11 @@ class WrapTest(fixtures.TestBase): ], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: eq_(str(exc), - "(OperationalError) 'this is a message' [{1: 1}, " + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: [{1: 1}, " "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, " "{1: 1}, {1: 1} ... displaying 10 of 11 total " - "bound parameter sets ... {1: 1}, {1: 1}]" + "bound parameter sets ... 
{1: 1}, {1: 1}]]" ) try: raise sa_exceptions.DBAPIError.instance( @@ -108,8 +114,9 @@ class WrapTest(fixtures.TestBase): except sa_exceptions.DBAPIError as exc: eq_( str(exc), - "(OperationalError) 'this is a message' [(1,), " - "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]") + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: [(1,), " + "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]]") try: raise sa_exceptions.DBAPIError.instance('this is a message', [ (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), @@ -117,10 +124,11 @@ class WrapTest(fixtures.TestBase): ], OperationalError(), DatabaseError) except sa_exceptions.DBAPIError as exc: eq_(str(exc), - "(OperationalError) 'this is a message' [(1,), " + "(test.base.test_except.OperationalError) " + "[SQL: 'this is a message'] [parameters: [(1,), " "(1,), (1,), (1,), (1,), (1,), (1,), (1,) " "... displaying 10 of 11 total bound " - "parameter sets ... (1,), (1,)]" + "parameter sets ... (1,), (1,)]]" ) def test_db_error_busted_dbapi(self): diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index e0bba0afa..00b4ba7f3 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -285,7 +285,7 @@ class ExecuteTest(fixtures.TestBase): def _go(conn): assert_raises_message( tsa.exc.StatementError, - r"nope \(original cause: Exception: nope\) u?'SELECT 1 ", + r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]). 
where( @@ -1608,7 +1608,7 @@ class HandleErrorTest(fixtures.TestBase): with engine.connect() as conn: assert_raises_message( tsa.exc.StatementError, - r"nope \(original cause: Exception: nope\) u?'SELECT 1 ", + r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]).where( column('foo') == literal('bar', MyType())) @@ -1799,7 +1799,7 @@ class HandleErrorTest(fixtures.TestBase): with engine.connect() as conn: assert_raises_message( tsa.exc.StatementError, - r"nope \(original cause: Exception: nope\) u?'SELECT 1 ", + r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]).where( column('foo') == literal('bar', MyType())) diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py index 1432a0f7b..180ea9388 100644 --- a/test/engine/test_logging.py +++ b/test/engine/test_logging.py @@ -56,7 +56,8 @@ class LogParamsTest(fixtures.TestBase): def test_error_large_dict(self): assert_raises_message( tsa.exc.DBAPIError, - r".*'INSERT INTO nonexistent \(data\) values \(:data\)' " + r".*'INSERT INTO nonexistent \(data\) values \(:data\)'\] " + "\[parameters: " "\[{'data': '0'}, {'data': '1'}, {'data': '2'}, " "{'data': '3'}, {'data': '4'}, {'data': '5'}, " "{'data': '6'}, {'data': '7'} ... displaying 10 of " @@ -71,8 +72,9 @@ class LogParamsTest(fixtures.TestBase): assert_raises_message( tsa.exc.DBAPIError, r".*INSERT INTO nonexistent \(data\) values " - "\(\?\)' \[\('0',\), \('1',\), \('2',\), \('3',\), " - "\('4',\), \('5',\), \('6',\), \('7',\) ... displaying " + "\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), \('3',\), " + "\('4',\), \('5',\), \('6',\), \('7',\) " + "... displaying " "10 of 100 total bound parameter sets ... 
" "\('98',\), \('99',\)\]", lambda: self.eng.execute( diff --git a/test/sql/test_query.py b/test/sql/test_query.py index fc040dfed..2f13486eb 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -81,11 +81,10 @@ class QueryTest(fixtures.TestBase): assert_raises_message( exc.StatementError, - r"A value is required for bind parameter 'user_name', in " + r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for " + "bind parameter 'user_name', in " "parameter group 2 " - "\(original cause: (sqlalchemy.exc.)?InvalidRequestError: A " - "value is required for bind parameter 'user_name', in " - "parameter group 2\) u?'INSERT INTO query_users", + r"\[SQL: u?'INSERT INTO query_users", users.insert().execute, {'user_id': 7, 'user_name': 'jack'}, {'user_id': 8, 'user_name': 'ed'}, -- cgit v1.2.1 From a7c1258d0340e94fd12e1b8aaa82ca3e282fb61d Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 18 Oct 2014 22:32:07 -0400 Subject: - flake8 --- test/orm/test_relationships.py | 2016 ++++++++++++++++++++++------------------ 1 file changed, 1097 insertions(+), 919 deletions(-) diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index 6bcb02639..4c5a5abee 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -5,20 +5,22 @@ from sqlalchemy import testing from sqlalchemy import Integer, String, ForeignKey, MetaData, and_ from sqlalchemy.testing.schema import Table, Column from sqlalchemy.orm import mapper, relationship, relation, \ - backref, create_session, configure_mappers, \ - clear_mappers, sessionmaker, attributes,\ - Session, composite, column_property, foreign,\ - remote, synonym, joinedload, subqueryload -from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY + backref, create_session, configure_mappers, \ + clear_mappers, sessionmaker, attributes,\ + Session, composite, column_property, foreign,\ + remote, synonym, joinedload, subqueryload +from sqlalchemy.orm.interfaces import 
ONETOMANY, MANYTOONE from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy import exc from sqlalchemy import inspect + class _RelationshipErrors(object): + def _assert_raises_no_relevant_fks(self, fn, expr, relname, - primary, *arg, **kw): + primary, *arg, **kw): assert_raises_message( sa.exc.ArgumentError, "Could not locate any relevant foreign key columns " @@ -33,7 +35,7 @@ class _RelationshipErrors(object): ) def _assert_raises_no_equality(self, fn, expr, relname, - primary, *arg, **kw): + primary, *arg, **kw): assert_raises_message( sa.exc.ArgumentError, "Could not locate any simple equality expressions " @@ -50,7 +52,7 @@ class _RelationshipErrors(object): ) def _assert_raises_ambig_join(self, fn, relname, secondary_arg, - *arg, **kw): + *arg, **kw): if secondary_arg is not None: assert_raises_message( exc.ArgumentError, @@ -78,7 +80,7 @@ class _RelationshipErrors(object): fn, *arg, **kw) def _assert_raises_no_join(self, fn, relname, secondary_arg, - *arg, **kw): + *arg, **kw): if secondary_arg is not None: assert_raises_message( exc.NoForeignKeysError, @@ -86,7 +88,8 @@ class _RelationshipErrors(object): "parent/child tables on relationship %s - " "there are no foreign keys linking these tables " "via secondary table '%s'. " - "Ensure that referencing columns are associated with a ForeignKey " + "Ensure that referencing columns are associated with a " + "ForeignKey " "or ForeignKeyConstraint, or specify 'primaryjoin' and " "'secondaryjoin' expressions" % (relname, secondary_arg), @@ -97,7 +100,8 @@ class _RelationshipErrors(object): "Could not determine join condition between " "parent/child tables on relationship %s - " "there are no foreign keys linking these tables. 
" - "Ensure that referencing columns are associated with a ForeignKey " + "Ensure that referencing columns are associated with a " + "ForeignKey " "or ForeignKeyConstraint, or specify a 'primaryjoin' " "expression." % (relname,), @@ -125,12 +129,16 @@ class _RelationshipErrors(object): "pairs based on join condition and remote_side arguments. " r"Consider using the remote\(\) annotation to " "accurately mark those elements of the join " - "condition that are on the remote side of the relationship." % relname, + "condition that are on the remote side of the relationship." % ( + relname + ), fn, *arg, **kw ) + class DependencyTwoParentTest(fixtures.MappedTest): + """Test flush() when a mapper is dependent on multiple relationships""" run_setup_mappers = 'once' @@ -140,74 +148,77 @@ class DependencyTwoParentTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table("tbl_a", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("name", String(128))) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("name", String(128))) Table("tbl_b", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("name", String(128))) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("name", String(128))) Table("tbl_c", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), - nullable=False), - Column("name", String(128))) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), + nullable=False), + Column("name", String(128))) Table("tbl_d", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), - nullable=False), - Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")), - Column("name", 
String(128))) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), + nullable=False), + Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")), + Column("name", String(128))) @classmethod def setup_classes(cls): class A(cls.Basic): pass + class B(cls.Basic): pass + class C(cls.Basic): pass + class D(cls.Basic): pass @classmethod def setup_mappers(cls): A, C, B, D, tbl_b, tbl_c, tbl_a, tbl_d = (cls.classes.A, - cls.classes.C, - cls.classes.B, - cls.classes.D, - cls.tables.tbl_b, - cls.tables.tbl_c, - cls.tables.tbl_a, - cls.tables.tbl_d) + cls.classes.C, + cls.classes.B, + cls.classes.D, + cls.tables.tbl_b, + cls.tables.tbl_c, + cls.tables.tbl_a, + cls.tables.tbl_d) mapper(A, tbl_a, properties=dict( c_rows=relationship(C, cascade="all, delete-orphan", - backref="a_row"))) + backref="a_row"))) mapper(B, tbl_b) mapper(C, tbl_c, properties=dict( d_rows=relationship(D, cascade="all, delete-orphan", - backref="c_row"))) + backref="c_row"))) mapper(D, tbl_d, properties=dict( b_row=relationship(B))) @classmethod def insert_data(cls): A, C, B, D = (cls.classes.A, - cls.classes.C, - cls.classes.B, - cls.classes.D) + cls.classes.C, + cls.classes.B, + cls.classes.D) session = create_session() a = A(name='a1') b = B(name='b1') c = C(name='c1', a_row=a) - d1 = D(name='d1', b_row=b, c_row=c) - d2 = D(name='d2', b_row=b, c_row=c) - d3 = D(name='d3', b_row=b, c_row=c) + d1 = D(name='d1', b_row=b, c_row=c) # noqa + d2 = D(name='d2', b_row=b, c_row=c) # noqa + d3 = D(name='d3', b_row=b, c_row=c) # noqa session.add(a) session.add(b) session.flush() @@ -230,7 +241,9 @@ class DependencyTwoParentTest(fixtures.MappedTest): session.delete(c) session.flush() + class M2ODontOverwriteFKTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table( @@ -248,13 +261,13 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest): class A(fixtures.BasicEntity): pass + class B(fixtures.BasicEntity): pass - 
mapper(A, a, properties={ - 'b': relationship(B, uselist=uselist) - }) + 'b': relationship(B, uselist=uselist) + }) mapper(B, b) return A, B @@ -271,7 +284,6 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest): sess.commit() # test that was broken by #3060 - from sqlalchemy.orm import joinedload a1 = sess.query(A).options(joinedload("b")).first() a1.bid = b1.id sess.flush() @@ -340,8 +352,8 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest): assert a1.bid is not None - class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): + """Tests the ultimate join condition, a single column that points to itself, e.g. within a SQL function or similar. The test is against a materialized path setup. @@ -365,28 +377,28 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): Table('entity', metadata, - Column('path', String(100), primary_key=True) - ) + Column('path', String(100), primary_key=True) + ) @classmethod def setup_classes(cls): class Entity(cls.Basic): + def __init__(self, path): self.path = path - def _descendants_fixture(self, data=True): Entity = self.classes.Entity entity = self.tables.entity m = mapper(Entity, entity, properties={ - "descendants": relationship(Entity, - primaryjoin= - remote(foreign(entity.c.path)).like( - entity.c.path.concat('/%')), - viewonly=True, - order_by=entity.c.path) - }) + "descendants": relationship( + Entity, + primaryjoin=remote(foreign(entity.c.path)).like( + entity.c.path.concat('/%')), + viewonly=True, + order_by=entity.c.path) + }) configure_mappers() assert m.get_property("descendants").direction is ONETOMANY if data: @@ -397,13 +409,13 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): entity = self.tables.entity m = mapper(Entity, entity, properties={ - "anscestors": relationship(Entity, - primaryjoin= - entity.c.path.like( - remote(foreign(entity.c.path)).concat('/%')), - viewonly=True, - order_by=entity.c.path) - }) + 
"anscestors": relationship( + Entity, + primaryjoin=entity.c.path.like( + remote(foreign(entity.c.path)).concat('/%')), + viewonly=True, + order_by=entity.c.path) + }) configure_mappers() assert m.get_property("anscestors").direction is ONETOMANY if data: @@ -447,7 +459,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): sess = self._descendants_fixture() Entity = self.classes.Entity e1 = sess.query(Entity).filter_by(path="/foo").\ - options(joinedload(Entity.descendants)).first() + options(joinedload(Entity.descendants)).first() eq_( [e.path for e in e1.descendants], @@ -459,7 +471,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): sess = self._descendants_fixture() Entity = self.classes.Entity e1 = sess.query(Entity).filter_by(path="/foo").\ - options(subqueryload(Entity.descendants)).first() + options(subqueryload(Entity.descendants)).first() eq_( [e.path for e in e1.descendants], @@ -471,7 +483,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): sess = self._anscestors_fixture() Entity = self.classes.Entity e1 = sess.query(Entity).filter_by(path="/foo/bar2/bat1").\ - options(joinedload(Entity.anscestors)).first() + options(joinedload(Entity.anscestors)).first() eq_( [e.path for e in e1.anscestors], ["/foo", "/foo/bar2"] @@ -488,8 +500,8 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL): ) - class CompositeSelfRefFKTest(fixtures.MappedTest): + """Tests a composite FK where, in the relationship(), one col points to itself in the same table. 
@@ -515,7 +527,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): def define_tables(cls, metadata): Table('company_t', metadata, Column('company_id', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('name', String(30))) Table('employee_t', metadata, @@ -533,10 +545,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): @classmethod def setup_classes(cls): class Company(cls.Basic): + def __init__(self, name): self.name = name class Employee(cls.Basic): + def __init__(self, name, company, emp_id, reports_to=None): self.name = name self.company = company @@ -545,106 +559,115 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): def test_explicit(self): Employee, Company, employee_t, company_t = (self.classes.Employee, - self.classes.Company, - self.tables.employee_t, - self.tables.company_t) + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) mapper(Company, company_t) - mapper(Employee, employee_t, properties= { - 'company':relationship(Company, - primaryjoin=employee_t.c.company_id== - company_t.c.company_id, - backref='employees'), - 'reports_to':relationship(Employee, primaryjoin= - sa.and_( - employee_t.c.emp_id==employee_t.c.reports_to_id, - employee_t.c.company_id==employee_t.c.company_id - ), + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, + primaryjoin=employee_t.c.company_id == + company_t.c.company_id, + backref='employees'), + 'reports_to': relationship(Employee, primaryjoin=sa.and_( + employee_t.c.emp_id == employee_t.c.reports_to_id, + employee_t.c.company_id == employee_t.c.company_id + ), remote_side=[employee_t.c.emp_id, employee_t.c.company_id], - foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id], + foreign_keys=[ + employee_t.c.reports_to_id, employee_t.c.company_id], backref=backref('employees', - foreign_keys=[employee_t.c.reports_to_id, - employee_t.c.company_id])) + foreign_keys=[employee_t.c.reports_to_id, + 
employee_t.c.company_id])) }) self._test() def test_implicit(self): Employee, Company, employee_t, company_t = (self.classes.Employee, - self.classes.Company, - self.tables.employee_t, - self.tables.company_t) + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) mapper(Company, company_t) - mapper(Employee, employee_t, properties= { - 'company':relationship(Company, backref='employees'), - 'reports_to':relationship(Employee, + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, remote_side=[employee_t.c.emp_id, employee_t.c.company_id], foreign_keys=[employee_t.c.reports_to_id, - employee_t.c.company_id], - backref=backref('employees', foreign_keys= - [employee_t.c.reports_to_id, employee_t.c.company_id]) - ) + employee_t.c.company_id], + backref=backref( + 'employees', + foreign_keys=[ + employee_t.c.reports_to_id, employee_t.c.company_id]) + ) }) self._test() def test_very_implicit(self): Employee, Company, employee_t, company_t = (self.classes.Employee, - self.classes.Company, - self.tables.employee_t, - self.tables.company_t) + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) mapper(Company, company_t) - mapper(Employee, employee_t, properties= { - 'company':relationship(Company, backref='employees'), - 'reports_to':relationship(Employee, + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, remote_side=[employee_t.c.emp_id, employee_t.c.company_id], backref='employees' - ) + ) }) self._test() def test_very_explicit(self): Employee, Company, employee_t, company_t = (self.classes.Employee, - self.classes.Company, - self.tables.employee_t, - self.tables.company_t) + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) mapper(Company, company_t) - mapper(Employee, employee_t, properties= { - 'company':relationship(Company, 
backref='employees'), - 'reports_to':relationship(Employee, - _local_remote_pairs = [ - (employee_t.c.reports_to_id, employee_t.c.emp_id), - (employee_t.c.company_id, employee_t.c.company_id) + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, + _local_remote_pairs=[ + (employee_t.c.reports_to_id, employee_t.c.emp_id), + (employee_t.c.company_id, employee_t.c.company_id) ], - foreign_keys=[employee_t.c.reports_to_id, - employee_t.c.company_id], - backref=backref('employees', foreign_keys= - [employee_t.c.reports_to_id, employee_t.c.company_id]) - ) + foreign_keys=[ + employee_t.c.reports_to_id, + employee_t.c.company_id], + backref=backref( + 'employees', + foreign_keys=[ + employee_t.c.reports_to_id, employee_t.c.company_id]) + ) }) self._test() def test_annotated(self): Employee, Company, employee_t, company_t = (self.classes.Employee, - self.classes.Company, - self.tables.employee_t, - self.tables.company_t) + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) mapper(Company, company_t) - mapper(Employee, employee_t, properties= { - 'company':relationship(Company, backref='employees'), - 'reports_to':relationship(Employee, + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, primaryjoin=sa.and_( - remote(employee_t.c.emp_id)==employee_t.c.reports_to_id, - remote(employee_t.c.company_id)==employee_t.c.company_id + remote(employee_t.c.emp_id) == employee_t.c.reports_to_id, + remote(employee_t.c.company_id) == employee_t.c.company_id ), backref=backref('employees') - ) + ) }) self._test() @@ -665,7 +688,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): set([ (employee_t.c.company_id, employee_t.c.company_id), (employee_t.c.emp_id, employee_t.c.reports_to_id), - ]) + ]) ) eq_( Employee.employees.property.remote_side, @@ -676,7 +699,7 @@ class 
CompositeSelfRefFKTest(fixtures.MappedTest): set([ (employee_t.c.company_id, employee_t.c.company_id), (employee_t.c.reports_to_id, employee_t.c.emp_id), - ]) + ]) ) def _setup_data(self, sess): @@ -686,12 +709,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): c2 = Company('c2') e1 = Employee('emp1', c1, 1) - e2 = Employee('emp2', c1, 2, e1) + e2 = Employee('emp2', c1, 2, e1) # noqa e3 = Employee('emp3', c1, 3, e1) - e4 = Employee('emp4', c1, 4, e3) + e4 = Employee('emp4', c1, 4, e3) # noqa e5 = Employee('emp5', c2, 1) - e6 = Employee('emp6', c2, 2, e5) - e7 = Employee('emp7', c2, 3, e5) + e6 = Employee('emp6', c2, 2, e5) # noqa + e7 = Employee('emp7', c2, 3, e5) # noqa sess.add_all((c1, c2)) sess.commit() @@ -711,64 +734,66 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): assert test_e5.name == 'emp5', test_e5.name assert [x.name for x in test_e1.employees] == ['emp2', 'emp3'] assert sess.query(Employee).\ - get([c1.company_id, 3]).reports_to.name == 'emp1' + get([c1.company_id, 3]).reports_to.name == 'emp1' assert sess.query(Employee).\ - get([c2.company_id, 3]).reports_to.name == 'emp5' + get([c2.company_id, 3]).reports_to.name == 'emp5' def _test_join_aliasing(self, sess): Employee, Company = self.classes.Employee, self.classes.Company eq_( - [n for n, in sess.query(Employee.name).\ - join(Employee.reports_to, aliased=True).\ - filter_by(name='emp5').\ - reset_joinpoint().\ - order_by(Employee.name)], + [n for n, in sess.query(Employee.name). + join(Employee.reports_to, aliased=True). + filter_by(name='emp5'). + reset_joinpoint(). 
+ order_by(Employee.name)], ['emp6', 'emp7'] ) - class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL): __dialect__ = 'default' + @classmethod def define_tables(cls, metadata): Table("parent", metadata, - Column('x', Integer, primary_key=True), - Column('y', Integer, primary_key=True), - Column('z', Integer), - ) + Column('x', Integer, primary_key=True), + Column('y', Integer, primary_key=True), + Column('z', Integer), + ) Table("child", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('x', Integer), - Column('y', Integer), - Column('z', Integer), - # note 'z' is not here - sa.ForeignKeyConstraint( - ["x", "y"], - ["parent.x", "parent.y"] - ) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('x', Integer), + Column('y', Integer), + Column('z', Integer), + # note 'z' is not here + sa.ForeignKeyConstraint( + ["x", "y"], + ["parent.x", "parent.y"] + ) + ) + @classmethod def setup_mappers(cls): parent, child = cls.tables.parent, cls.tables.child + class Parent(cls.Comparable): pass class Child(cls.Comparable): pass mapper(Parent, parent, properties={ - 'children':relationship(Child, primaryjoin=and_( - parent.c.x==child.c.x, - parent.c.y==child.c.y, - parent.c.z==child.c.z, + 'children': relationship(Child, primaryjoin=and_( + parent.c.x == child.c.x, + parent.c.y == child.c.y, + parent.c.z == child.c.z, )) }) mapper(Child, child) def test_joins_fully(self): Parent, Child = self.classes.Parent, self.classes.Child - s = Session() + self.assert_compile( Parent.children.property.strategy._lazywhere, ":param_1 = child.x AND :param_2 = child.y AND :param_3 = child.z" @@ -776,19 +801,20 @@ class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL): class SynonymsAsFKsTest(fixtures.MappedTest): + """Syncrules on foreign keys that are also primary""" @classmethod def define_tables(cls, metadata): Table("tableA", metadata, - Column("id",Integer,primary_key=True, - 
test_needs_autoincrement=True), - Column("foo",Integer,), + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("foo", Integer,), test_needs_fk=True) - Table("tableB",metadata, - Column("id",Integer,primary_key=True, - test_needs_autoincrement=True), + Table("tableB", metadata, + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), Column("_a_id", Integer, key='a_id', primary_key=True), test_needs_fk=True) @@ -798,6 +824,7 @@ class SynonymsAsFKsTest(fixtures.MappedTest): pass class B(cls.Basic): + @property def a_id(self): return self._a_id @@ -832,18 +859,19 @@ class SynonymsAsFKsTest(fixtures.MappedTest): class FKsAsPksTest(fixtures.MappedTest): + """Syncrules on foreign keys that are also primary""" @classmethod def define_tables(cls, metadata): Table("tableA", metadata, - Column("id",Integer,primary_key=True, - test_needs_autoincrement=True), - Column("foo",Integer,), + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("foo", Integer,), test_needs_fk=True) - Table("tableB",metadata, - Column("id",Integer,ForeignKey("tableA.id"),primary_key=True), + Table("tableB", metadata, + Column("id", Integer, ForeignKey("tableA.id"), primary_key=True), test_needs_fk=True) @classmethod @@ -863,9 +891,8 @@ class FKsAsPksTest(fixtures.MappedTest): self.classes.B, self.tables.tableA) - mapper(A, tableA, properties={ - 'b':relationship(B, cascade="all,delete-orphan", uselist=False)}) + 'b': relationship(B, cascade="all,delete-orphan", uselist=False)}) mapper(B, tableB) configure_mappers() @@ -890,7 +917,7 @@ class FKsAsPksTest(fixtures.MappedTest): self.tables.tableA) mapper(A, tableA, properties={ - 'bs':relationship(B, cascade="save-update")}) + 'bs': relationship(B, cascade="save-update")}) mapper(B, tableB) a1 = A() @@ -915,7 +942,7 @@ class FKsAsPksTest(fixtures.MappedTest): self.tables.tableA) mapper(B, tableB, properties={ - 'a':relationship(A, cascade="save-update")}) + 'a': 
relationship(A, cascade="save-update")}) mapper(A, tableA) b1 = B() @@ -938,7 +965,8 @@ class FKsAsPksTest(fixtures.MappedTest): A, tableA = self.classes.A, self.tables.tableA # postgresql cant handle a nullable PK column...? - tableC = Table('tablec', tableA.metadata, + tableC = Table( + 'tablec', tableA.metadata, Column('id', Integer, primary_key=True), Column('a_id', Integer, ForeignKey('tableA.id'), primary_key=True, autoincrement=False, nullable=True)) @@ -947,7 +975,7 @@ class FKsAsPksTest(fixtures.MappedTest): class C(fixtures.BasicEntity): pass mapper(C, tableC, properties={ - 'a':relationship(A, cascade="save-update") + 'a': relationship(A, cascade="save-update") }) mapper(A, tableA) @@ -968,12 +996,11 @@ class FKsAsPksTest(fixtures.MappedTest): self.classes.B, self.tables.tableA) - for cascade in ("save-update, delete", #"save-update, delete-orphan", "save-update, delete, delete-orphan"): mapper(B, tableB, properties={ - 'a':relationship(A, cascade=cascade, single_parent=True) + 'a': relationship(A, cascade=cascade, single_parent=True) }) mapper(A, tableA) @@ -999,12 +1026,11 @@ class FKsAsPksTest(fixtures.MappedTest): self.classes.B, self.tables.tableA) - for cascade in ("save-update, delete", #"save-update, delete-orphan", "save-update, delete, delete-orphan"): mapper(A, tableA, properties={ - 'bs':relationship(B, cascade=cascade) + 'bs': relationship(B, cascade=cascade) }) mapper(B, tableB) @@ -1029,7 +1055,7 @@ class FKsAsPksTest(fixtures.MappedTest): self.tables.tableA) mapper(A, tableA, properties={ - 'bs':relationship(B, cascade="none")}) + 'bs': relationship(B, cascade="none")}) mapper(B, tableB) a1 = A() @@ -1054,7 +1080,7 @@ class FKsAsPksTest(fixtures.MappedTest): self.tables.tableA) mapper(B, tableB, properties={ - 'a':relationship(A, cascade="none")}) + 'a': relationship(A, cascade="none")}) mapper(A, tableA) b1 = B() @@ -1070,39 +1096,42 @@ class FKsAsPksTest(fixtures.MappedTest): assert a1 not in sess assert b1 not in sess + class 
UniqueColReferenceSwitchTest(fixtures.MappedTest): + """test a relationship based on a primary join against a unique non-pk column""" @classmethod def define_tables(cls, metadata): Table("table_a", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("ident", String(10), nullable=False, - unique=True), - ) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("ident", String(10), nullable=False, + unique=True), + ) Table("table_b", metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("a_ident", String(10), - ForeignKey('table_a.ident'), - nullable=False), - ) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column("a_ident", String(10), + ForeignKey('table_a.ident'), + nullable=False), + ) @classmethod def setup_classes(cls): class A(cls.Comparable): pass + class B(cls.Comparable): pass def test_switch_parent(self): A, B, table_b, table_a = (self.classes.A, - self.classes.B, - self.tables.table_b, - self.tables.table_a) + self.classes.B, + self.tables.table_b, + self.tables.table_a) mapper(A, table_a) mapper(B, table_b, properties={"a": relationship(A, backref="bs")}) @@ -1122,7 +1151,9 @@ class UniqueColReferenceSwitchTest(fixtures.MappedTest): session.delete(a1) session.flush() + class RelationshipToSelectableTest(fixtures.MappedTest): + """Test a map to a select that relates to a map to the table.""" @classmethod @@ -1142,33 +1173,40 @@ class RelationshipToSelectableTest(fixtures.MappedTest): class Container(fixtures.BasicEntity): pass + class LineItem(fixtures.BasicEntity): pass container_select = sa.select( [items.c.policyNum, items.c.policyEffDate, items.c.type], distinct=True, - ).alias('container_select') + ).alias('container_select') mapper(LineItem, items) - mapper(Container, - container_select, - order_by=sa.asc(container_select.c.type), - properties=dict( - lineItems=relationship(LineItem, - lazy='select', - 
cascade='all, delete-orphan', - order_by=sa.asc(items.c.id), - primaryjoin=sa.and_( - container_select.c.policyNum==items.c.policyNum, - container_select.c.policyEffDate== - items.c.policyEffDate, - container_select.c.type==items.c.type), - foreign_keys=[ - items.c.policyNum, - items.c.policyEffDate, - items.c.type]))) + mapper( + Container, + container_select, + order_by=sa.asc(container_select.c.type), + properties=dict( + lineItems=relationship( + LineItem, + lazy='select', + cascade='all, delete-orphan', + order_by=sa.asc(items.c.id), + primaryjoin=sa.and_( + container_select.c.policyNum == items.c.policyNum, + container_select.c.policyEffDate == + items.c.policyEffDate, + container_select.c.type == items.c.type), + foreign_keys=[ + items.c.policyNum, + items.c.policyEffDate, + items.c.type + ] + ) + ) + ) session = create_session() con = Container() @@ -1189,7 +1227,9 @@ class RelationshipToSelectableTest(fixtures.MappedTest): for old, new in zip(con.lineItems, newcon.lineItems): eq_(old.id, new.id) + class FKEquatedToConstantTest(fixtures.MappedTest): + """test a relationship with a non-column entity in the primary join, is not viewonly, and also has the non-column's clause mentioned in the foreign keys list. 
@@ -1199,31 +1239,32 @@ class FKEquatedToConstantTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('tags', metadata, Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column("data", String(50)), - ) + test_needs_autoincrement=True), + Column("data", String(50)), + ) Table('tag_foo', metadata, - Column("id", Integer, primary_key=True, - test_needs_autoincrement=True), - Column('tagid', Integer), - Column("data", String(50)), - ) + Column("id", Integer, primary_key=True, + test_needs_autoincrement=True), + Column('tagid', Integer), + Column("data", String(50)), + ) def test_basic(self): tag_foo, tags = self.tables.tag_foo, self.tables.tags class Tag(fixtures.ComparableEntity): pass + class TagInstance(fixtures.ComparableEntity): pass mapper(Tag, tags, properties={ - 'foo':relationship(TagInstance, - primaryjoin=sa.and_(tag_foo.c.data=='iplc_case', - tag_foo.c.tagid==tags.c.id), - foreign_keys=[tag_foo.c.tagid, tag_foo.c.data], - ), + 'foo': relationship( + TagInstance, + primaryjoin=sa.and_(tag_foo.c.data == 'iplc_case', + tag_foo.c.tagid == tags.c.id), + foreign_keys=[tag_foo.c.tagid, tag_foo.c.data]), }) mapper(TagInstance, tag_foo) @@ -1248,41 +1289,43 @@ class FKEquatedToConstantTest(fixtures.MappedTest): [TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')] ) + class BackrefPropagatesForwardsArgs(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('users', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('name', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(50)) + ) Table('addresses', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('user_id', Integer), - Column('email', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('user_id', Integer), + Column('email', 
String(50)) + ) @classmethod def setup_classes(cls): class User(cls.Comparable): pass + class Address(cls.Comparable): pass def test_backref(self): User, Address, users, addresses = (self.classes.User, - self.classes.Address, - self.tables.users, - self.tables.addresses) - + self.classes.Address, + self.tables.users, + self.tables.addresses) mapper(User, users, properties={ - 'addresses':relationship(Address, - primaryjoin=addresses.c.user_id==users.c.id, - foreign_keys=addresses.c.user_id, - backref='user') + 'addresses': relationship( + Address, + primaryjoin=addresses.c.user_id == users.c.id, + foreign_keys=addresses.c.user_id, + backref='user') }) mapper(Address, addresses) @@ -1292,9 +1335,11 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest): sess.commit() eq_(sess.query(Address).all(), [ Address(email='a1', user=User(name='u1')) - ]) + ]) + class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): + """test ambiguous joins due to FKs on both sides treated as self-referential. 
@@ -1307,25 +1352,28 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): - subscriber_table = Table('subscriber', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - ) - - address_table = Table('address', - metadata, - Column('subscriber_id', Integer, - ForeignKey('subscriber.id'), primary_key=True), - Column('type', String(1), primary_key=True), - ) + Table( + 'subscriber', metadata, + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True)) + + Table( + 'address', metadata, + Column( + 'subscriber_id', Integer, + ForeignKey('subscriber.id'), primary_key=True), + Column('type', String(1), primary_key=True), + ) @classmethod def setup_mappers(cls): subscriber, address = cls.tables.subscriber, cls.tables.address - subscriber_and_address = subscriber.join(address, - and_(address.c.subscriber_id==subscriber.c.id, - address.c.type.in_(['A', 'B', 'C']))) + subscriber_and_address = subscriber.join( + address, + and_(address.c.subscriber_id == subscriber.c.id, + address.c.type.in_(['A', 'B', 'C']))) class Address(cls.Comparable): pass @@ -1336,10 +1384,10 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): mapper(Address, address) mapper(Subscriber, subscriber_and_address, properties={ - 'id':[subscriber.c.id, address.c.subscriber_id], - 'addresses' : relationship(Address, - backref=backref("customer")) - }) + 'id': [subscriber.c.id, address.c.subscriber_id], + 'addresses': relationship(Address, + backref=backref("customer")) + }) def test_mapping(self): Subscriber, Address = self.classes.Subscriber, self.classes.Address @@ -1349,11 +1397,11 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): assert Address.customer.property.direction is MANYTOONE s1 = Subscriber(type='A', - addresses = [ - Address(type='D'), - Address(type='E'), - ] - ) + addresses=[ + Address(type='D'), + Address(type='E'), + ] + ) a1 = Address(type='B', 
customer=Subscriber(type='C')) assert s1.addresses[0].customer is s1 @@ -1375,22 +1423,23 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest): class ManualBackrefTest(_fixtures.FixtureTest): + """Test explicit relationships that are backrefs to each other.""" run_inserts = None def test_o2m(self): users, Address, addresses, User = (self.tables.users, - self.classes.Address, - self.tables.addresses, - self.classes.User) + self.classes.Address, + self.tables.addresses, + self.classes.User) mapper(User, users, properties={ - 'addresses':relationship(Address, back_populates='user') + 'addresses': relationship(Address, back_populates='user') }) mapper(Address, addresses, properties={ - 'user':relationship(User, back_populates='addresses') + 'user': relationship(User, back_populates='addresses') }) sess = create_session() @@ -1409,52 +1458,56 @@ class ManualBackrefTest(_fixtures.FixtureTest): def test_invalid_key(self): users, Address, addresses, User = (self.tables.users, - self.classes.Address, - self.tables.addresses, - self.classes.User) + self.classes.Address, + self.tables.addresses, + self.classes.User) mapper(User, users, properties={ - 'addresses':relationship(Address, back_populates='userr') + 'addresses': relationship(Address, back_populates='userr') }) mapper(Address, addresses, properties={ - 'user':relationship(User, back_populates='addresses') + 'user': relationship(User, back_populates='addresses') }) assert_raises(sa.exc.InvalidRequestError, configure_mappers) def test_invalid_target(self): - addresses, Dingaling, User, dingalings, Address, users = (self.tables.addresses, - self.classes.Dingaling, - self.classes.User, - self.tables.dingalings, - self.classes.Address, - self.tables.users) + addresses, Dingaling, User, dingalings, Address, users = ( + self.tables.addresses, + self.classes.Dingaling, + self.classes.User, + self.tables.dingalings, + self.classes.Address, + self.tables.users) mapper(User, users, properties={ - 
'addresses':relationship(Address, back_populates='dingaling'), + 'addresses': relationship(Address, back_populates='dingaling'), }) mapper(Dingaling, dingalings) mapper(Address, addresses, properties={ - 'dingaling':relationship(Dingaling) + 'dingaling': relationship(Dingaling) }) assert_raises_message(sa.exc.ArgumentError, - r"reverse_property 'dingaling' on relationship " - "User.addresses references " - "relationship Address.dingaling, which does not " - "reference mapper Mapper\|User\|users", - configure_mappers) + r"reverse_property 'dingaling' on relationship " + "User.addresses references " + "relationship Address.dingaling, which does not " + "reference mapper Mapper\|User\|users", + configure_mappers) + class JoinConditionErrorTest(fixtures.TestBase): def test_clauseelement_pj(self): from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() + class C1(Base): __tablename__ = 'c1' id = Column('id', Integer, primary_key=True) + class C2(Base): __tablename__ = 'c2' id = Column('id', Integer, primary_key=True) @@ -1466,39 +1519,42 @@ class JoinConditionErrorTest(fixtures.TestBase): def test_clauseelement_pj_false(self): from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() + class C1(Base): __tablename__ = 'c1' id = Column('id', Integer, primary_key=True) + class C2(Base): __tablename__ = 'c2' id = Column('id', Integer, primary_key=True) c1id = Column('c1id', Integer, ForeignKey('c1.id')) - c2 = relationship(C1, primaryjoin="x"=="y") + c2 = relationship(C1, primaryjoin="x" == "y") assert_raises(sa.exc.ArgumentError, configure_mappers) def test_only_column_elements(self): m = MetaData() t1 = Table('t1', m, - Column('id', Integer, primary_key=True), - Column('foo_id', Integer, ForeignKey('t2.id')), - ) + Column('id', Integer, primary_key=True), + Column('foo_id', Integer, ForeignKey('t2.id')), + ) t2 = Table('t2', m, - Column('id', Integer, primary_key=True), - ) + Column('id', Integer, primary_key=True), + ) + 
class C1(object): pass + class C2(object): pass - mapper(C1, t1, properties={'c2':relationship(C2, - primaryjoin=t1.join(t2))}) + mapper(C1, t1, properties={ + 'c2': relationship(C2, primaryjoin=t1.join(t2))}) mapper(C2, t2) assert_raises(sa.exc.ArgumentError, configure_mappers) def test_invalid_string_args(self): from sqlalchemy.ext.declarative import declarative_base - from sqlalchemy import util for argname, arg in [ ('remote_side', ['c1.id']), @@ -1508,8 +1564,9 @@ class JoinConditionErrorTest(fixtures.TestBase): ('order_by', ['id']), ]: clear_mappers() - kw = {argname:arg} + kw = {argname: arg} Base = declarative_base() + class C1(Base): __tablename__ = 'c1' id = Column('id', Integer, primary_key=True) @@ -1527,51 +1584,52 @@ class JoinConditionErrorTest(fixtures.TestBase): (argname, arg[0], type(arg[0])), configure_mappers) - def test_fk_error_not_raised_unrelated(self): m = MetaData() t1 = Table('t1', m, - Column('id', Integer, primary_key=True), - Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')), - ) - t2 = Table('t2', m, - Column('id', Integer, primary_key=True), - ) + Column('id', Integer, primary_key=True), + Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')), + ) + t2 = Table('t2', m, # noqa + Column('id', Integer, primary_key=True), + ) t3 = Table('t3', m, - Column('id', Integer, primary_key=True), - Column('t1id', Integer, ForeignKey('t1.id')) - ) + Column('id', Integer, primary_key=True), + Column('t1id', Integer, ForeignKey('t1.id')) + ) class C1(object): pass + class C2(object): pass - mapper(C1, t1, properties={'c2':relationship(C2)}) + mapper(C1, t1, properties={'c2': relationship(C2)}) mapper(C2, t3) - assert C1.c2.property.primaryjoin.compare(t1.c.id==t3.c.t1id) + assert C1.c2.property.primaryjoin.compare(t1.c.id == t3.c.t1id) def test_join_error_raised(self): m = MetaData() t1 = Table('t1', m, - Column('id', Integer, primary_key=True), - ) - t2 = Table('t2', m, - Column('id', Integer, primary_key=True), - ) + Column('id', 
Integer, primary_key=True), + ) + t2 = Table('t2', m, # noqa + Column('id', Integer, primary_key=True), + ) t3 = Table('t3', m, - Column('id', Integer, primary_key=True), - Column('t1id', Integer) - ) + Column('id', Integer, primary_key=True), + Column('t1id', Integer) + ) class C1(object): pass + class C2(object): pass - mapper(C1, t1, properties={'c2':relationship(C2)}) + mapper(C1, t1, properties={'c2': relationship(C2)}) mapper(C2, t3) assert_raises(sa.exc.ArgumentError, configure_mappers) @@ -1579,7 +1637,9 @@ class JoinConditionErrorTest(fixtures.TestBase): def teardown(self): clear_mappers() + class TypeMatchTest(fixtures.MappedTest): + """test errors raised when trying to add items whose type is not handled by a relationship""" @@ -1587,33 +1647,38 @@ class TypeMatchTest(fixtures.MappedTest): def define_tables(cls, metadata): Table("a", metadata, Column('aid', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('adata', String(30))) Table("b", metadata, - Column('bid', Integer, primary_key=True, - test_needs_autoincrement=True), - Column("a_id", Integer, ForeignKey("a.aid")), - Column('bdata', String(30))) + Column('bid', Integer, primary_key=True, + test_needs_autoincrement=True), + Column("a_id", Integer, ForeignKey("a.aid")), + Column('bdata', String(30))) Table("c", metadata, Column('cid', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column("b_id", Integer, ForeignKey("b.bid")), Column('cdata', String(30))) Table("d", metadata, Column('did', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column("a_id", Integer, ForeignKey("a.aid")), Column('ddata', String(30))) def test_o2m_oncascade(self): a, c, b = (self.tables.a, - self.tables.c, - self.tables.b) + self.tables.c, + self.tables.b) + + class A(fixtures.BasicEntity): + pass + + class B(fixtures.BasicEntity): + pass - class A(fixtures.BasicEntity): pass - 
class B(fixtures.BasicEntity): pass - class C(fixtures.BasicEntity): pass - mapper(A, a, properties={'bs':relationship(B)}) + class C(fixtures.BasicEntity): + pass + mapper(A, a, properties={'bs': relationship(B)}) mapper(B, b) mapper(C, c) @@ -1633,13 +1698,18 @@ class TypeMatchTest(fixtures.MappedTest): def test_o2m_onflush(self): a, c, b = (self.tables.a, - self.tables.c, - self.tables.b) + self.tables.c, + self.tables.b) + + class A(fixtures.BasicEntity): + pass - class A(fixtures.BasicEntity): pass - class B(fixtures.BasicEntity): pass - class C(fixtures.BasicEntity): pass - mapper(A, a, properties={'bs':relationship(B, cascade="none")}) + class B(fixtures.BasicEntity): + pass + + class C(fixtures.BasicEntity): + pass + mapper(A, a, properties={'bs': relationship(B, cascade="none")}) mapper(B, b) mapper(C, c) @@ -1653,18 +1723,23 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(c1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an item", - sess.flush) + "Attempting to flush an item", + sess.flush) def test_o2m_nopoly_onflush(self): a, c, b = (self.tables.a, - self.tables.c, - self.tables.b) + self.tables.c, + self.tables.b) - class A(fixtures.BasicEntity): pass - class B(fixtures.BasicEntity): pass - class C(B): pass - mapper(A, a, properties={'bs':relationship(B, cascade="none")}) + class A(fixtures.BasicEntity): + pass + + class B(fixtures.BasicEntity): + pass + + class C(B): + pass + mapper(A, a, properties={'bs': relationship(B, cascade="none")}) mapper(B, b) mapper(C, c, inherits=B) @@ -1678,20 +1753,25 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(c1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an item", - sess.flush) + "Attempting to flush an item", + sess.flush) def test_m2o_nopoly_onflush(self): a, b, d = (self.tables.a, - self.tables.b, - self.tables.d) + self.tables.b, + self.tables.d) - class A(fixtures.BasicEntity): pass - class B(A): pass - class 
D(fixtures.BasicEntity): pass + class A(fixtures.BasicEntity): + pass + + class B(A): + pass + + class D(fixtures.BasicEntity): + pass mapper(A, a) mapper(B, b, inherits=A) - mapper(D, d, properties={"a":relationship(A, cascade="none")}) + mapper(D, d, properties={"a": relationship(A, cascade="none")}) b1 = B() d1 = D() d1.a = b1 @@ -1699,27 +1779,33 @@ class TypeMatchTest(fixtures.MappedTest): sess.add(b1) sess.add(d1) assert_raises_message(sa.orm.exc.FlushError, - "Attempting to flush an item", - sess.flush) + "Attempting to flush an item", + sess.flush) def test_m2o_oncascade(self): a, b, d = (self.tables.a, - self.tables.b, - self.tables.d) + self.tables.b, + self.tables.d) + + class A(fixtures.BasicEntity): + pass + + class B(fixtures.BasicEntity): + pass - class A(fixtures.BasicEntity): pass - class B(fixtures.BasicEntity): pass - class D(fixtures.BasicEntity): pass + class D(fixtures.BasicEntity): + pass mapper(A, a) mapper(B, b) - mapper(D, d, properties={"a":relationship(A)}) + mapper(D, d, properties={"a": relationship(A)}) b1 = B() d1 = D() d1.a = b1 sess = create_session() assert_raises_message(AssertionError, - "doesn't handle objects of type", - sess.add, d1) + "doesn't handle objects of type", + sess.add, d1) + class TypedAssociationTable(fixtures.MappedTest): @@ -1727,8 +1813,10 @@ class TypedAssociationTable(fixtures.MappedTest): def define_tables(cls, metadata): class MySpecialType(sa.types.TypeDecorator): impl = String + def process_bind_param(self, value, dialect): return "lala" + value + def process_result_value(self, value, dialect): return value[4:] @@ -1746,15 +1834,17 @@ class TypedAssociationTable(fixtures.MappedTest): """Many-to-many tables with special types for candidate keys.""" t2, t3, t1 = (self.tables.t2, - self.tables.t3, - self.tables.t1) + self.tables.t3, + self.tables.t1) + class T1(fixtures.BasicEntity): + pass - class T1(fixtures.BasicEntity): pass - class T2(fixtures.BasicEntity): pass + class T2(fixtures.BasicEntity): + pass 
mapper(T2, t2) mapper(T1, t1, properties={ - 't2s':relationship(T2, secondary=t3, backref='t1s')}) + 't2s': relationship(T2, secondary=t3, backref='t1s')}) a = T1() a.col1 = "aid" @@ -1775,7 +1865,9 @@ class TypedAssociationTable(fixtures.MappedTest): assert t3.count().scalar() == 1 + class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL): + """test op() in conjunction with join conditions""" run_create_tables = run_deletes = None @@ -1785,47 +1877,50 @@ class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): Table('a', metadata, - Column('id', Integer, primary_key=True), - Column('foo', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('foo', String(50)) + ) Table('b', metadata, - Column('id', Integer, primary_key=True), - Column('foo', String(50)) - ) + Column('id', Integer, primary_key=True), + Column('foo', String(50)) + ) def test_join_on_custom_op(self): class A(fixtures.BasicEntity): pass + class B(fixtures.BasicEntity): pass mapper(A, self.tables.a, properties={ - 'bs': relationship(B, - primaryjoin=self.tables.a.c.foo.op( - '&*', is_comparison=True - )(foreign(self.tables.b.c.foo)), - viewonly=True - ) - }) + 'bs': relationship(B, + primaryjoin=self.tables.a.c.foo.op( + '&*', is_comparison=True + )(foreign(self.tables.b.c.foo)), + viewonly=True + ) + }) mapper(B, self.tables.b) self.assert_compile( Session().query(A).join(A.bs), - "SELECT a.id AS a_id, a.foo AS a_foo FROM a JOIN b ON a.foo &* b.foo" + "SELECT a.id AS a_id, a.foo AS a_foo " + "FROM a JOIN b ON a.foo &* b.foo" ) class ViewOnlyHistoryTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40))) Table("t2", metadata, - Column('id', Integer, primary_key=True, - 
test_needs_autoincrement=True), - Column('data', String(40)), - Column('t1id', Integer, ForeignKey('t1.id'))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t1id', Integer, ForeignKey('t1.id'))) def _assert_fk(self, a1, b1, is_set): s = Session(testing.db) @@ -1842,12 +1937,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest): def test_o2m_viewonly_oneside(self): class A(fixtures.ComparableEntity): pass + class B(fixtures.ComparableEntity): pass mapper(A, self.tables.t1, properties={ "bs": relationship(B, viewonly=True, - backref=backref("a", viewonly=False)) + backref=backref("a", viewonly=False)) }) mapper(B, self.tables.t2) @@ -1867,12 +1963,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest): def test_m2o_viewonly_oneside(self): class A(fixtures.ComparableEntity): pass + class B(fixtures.ComparableEntity): pass mapper(A, self.tables.t1, properties={ "bs": relationship(B, viewonly=False, - backref=backref("a", viewonly=True)) + backref=backref("a", viewonly=True)) }) mapper(B, self.tables.t2) @@ -1892,6 +1989,7 @@ class ViewOnlyHistoryTest(fixtures.MappedTest): def test_o2m_viewonly_only(self): class A(fixtures.ComparableEntity): pass + class B(fixtures.ComparableEntity): pass @@ -1910,13 +2008,14 @@ class ViewOnlyHistoryTest(fixtures.MappedTest): def test_m2o_viewonly_only(self): class A(fixtures.ComparableEntity): pass + class B(fixtures.ComparableEntity): pass mapper(A, self.tables.t1) mapper(B, self.tables.t2, properties={ 'a': relationship(A, viewonly=True) - }) + }) a1 = A() b1 = B() @@ -1925,34 +2024,39 @@ class ViewOnlyHistoryTest(fixtures.MappedTest): self._assert_fk(a1, b1, False) + class ViewOnlyM2MBackrefTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + 
Column('data', String(40))) Table("t2", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40)), - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + ) Table("t1t2", metadata, - Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True), - Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True), - ) + Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True), + Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True), + ) def test_viewonly(self): t1t2, t2, t1 = (self.tables.t1t2, - self.tables.t2, - self.tables.t1) + self.tables.t2, + self.tables.t1) - class A(fixtures.ComparableEntity):pass - class B(fixtures.ComparableEntity):pass + class A(fixtures.ComparableEntity): + pass + + class B(fixtures.ComparableEntity): + pass mapper(A, t1, properties={ - 'bs':relationship(B, secondary=t1t2, - backref=backref('as_', viewonly=True)) + 'bs': relationship(B, secondary=t1t2, + backref=backref('as_', viewonly=True)) }) mapper(B, t2) @@ -1971,25 +2075,27 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest): sess.query(B).first(), B(as_=[A(id=a1.id)]) ) + class ViewOnlyOverlappingNames(fixtures.MappedTest): + """'viewonly' mappings with overlapping PK column names.""" @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40))) Table("t2", metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40)), - Column('t1id', Integer, ForeignKey('t1.id'))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t1id', Integer, ForeignKey('t1.id'))) Table("t3", metadata, - Column('id', Integer, primary_key=True, - 
test_needs_autoincrement=True), - Column('data', String(40)), - Column('t2id', Integer, ForeignKey('t2.id'))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t2id', Integer, ForeignKey('t2.id'))) def test_three_table_view(self): """A three table join with overlapping PK names. @@ -2001,23 +2107,29 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest): """ t2, t3, t1 = (self.tables.t2, - self.tables.t3, - self.tables.t1) + self.tables.t3, + self.tables.t1) + + class C1(fixtures.BasicEntity): + pass - class C1(fixtures.BasicEntity): pass - class C2(fixtures.BasicEntity): pass - class C3(fixtures.BasicEntity): pass + class C2(fixtures.BasicEntity): + pass + + class C3(fixtures.BasicEntity): + pass mapper(C1, t1, properties={ - 't2s':relationship(C2), - 't2_view':relationship(C2, - viewonly=True, - primaryjoin=sa.and_(t1.c.id==t2.c.t1id, - t3.c.t2id==t2.c.id, - t3.c.data==t1.c.data))}) + 't2s': relationship(C2), + 't2_view': relationship( + C2, + viewonly=True, + primaryjoin=sa.and_(t1.c.id == t2.c.t1id, + t3.c.t2id == t2.c.id, + t3.c.data == t1.c.data))}) mapper(C2, t2) mapper(C3, t3, properties={ - 't2':relationship(C2)}) + 't2': relationship(C2)}) c1 = C1() c1.data = 'c1data' @@ -2026,7 +2138,7 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest): c2b = C2() c1.t2s.append(c2b) c3 = C3() - c3.data='c1data' + c3.data = 'c1data' c3.t2 = c2b sess = create_session() sess.add(c1) @@ -2038,25 +2150,27 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest): assert set([x.id for x in c1.t2s]) == set([c2a.id, c2b.id]) assert set([x.id for x in c1.t2_view]) == set([c2b.id]) + class ViewOnlyUniqueNames(fixtures.MappedTest): + """'viewonly' mappings with unique PK column names.""" @classmethod def define_tables(cls, metadata): Table("t1", metadata, - Column('t1id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40))) + Column('t1id', Integer, primary_key=True, + 
test_needs_autoincrement=True), + Column('data', String(40))) Table("t2", metadata, - Column('t2id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40)), - Column('t1id_ref', Integer, ForeignKey('t1.t1id'))) + Column('t2id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t1id_ref', Integer, ForeignKey('t1.t1id'))) Table("t3", metadata, - Column('t3id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(40)), - Column('t2id_ref', Integer, ForeignKey('t2.t2id'))) + Column('t3id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(40)), + Column('t2id_ref', Integer, ForeignKey('t2.t2id'))) def test_three_table_view(self): """A three table join with overlapping PK names. @@ -2067,23 +2181,29 @@ class ViewOnlyUniqueNames(fixtures.MappedTest): """ t2, t3, t1 = (self.tables.t2, - self.tables.t3, - self.tables.t1) + self.tables.t3, + self.tables.t1) + + class C1(fixtures.BasicEntity): + pass - class C1(fixtures.BasicEntity): pass - class C2(fixtures.BasicEntity): pass - class C3(fixtures.BasicEntity): pass + class C2(fixtures.BasicEntity): + pass + + class C3(fixtures.BasicEntity): + pass mapper(C1, t1, properties={ - 't2s':relationship(C2), - 't2_view':relationship(C2, - viewonly=True, - primaryjoin=sa.and_(t1.c.t1id==t2.c.t1id_ref, - t3.c.t2id_ref==t2.c.t2id, - t3.c.data==t1.c.data))}) + 't2s': relationship(C2), + 't2_view': relationship( + C2, + viewonly=True, + primaryjoin=sa.and_(t1.c.t1id == t2.c.t1id_ref, + t3.c.t2id_ref == t2.c.t2id, + t3.c.data == t1.c.data))}) mapper(C2, t2) mapper(C3, t3, properties={ - 't2':relationship(C2)}) + 't2': relationship(C2)}) c1 = C1() c1.data = 'c1data' @@ -2092,7 +2212,7 @@ class ViewOnlyUniqueNames(fixtures.MappedTest): c2b = C2() c1.t2s.append(c2b) c3 = C3() - c3.data='c1data' + c3.data = 'c1data' c3.t2 = c2b sess = create_session() @@ -2104,30 +2224,35 @@ class 
ViewOnlyUniqueNames(fixtures.MappedTest): assert set([x.t2id for x in c1.t2s]) == set([c2a.t2id, c2b.t2id]) assert set([x.t2id for x in c1.t2_view]) == set([c2b.t2id]) + class ViewOnlyLocalRemoteM2M(fixtures.TestBase): + """test that local-remote is correctly determined for m2m""" def test_local_remote(self): meta = MetaData() t1 = Table('t1', meta, - Column('id', Integer, primary_key=True), - ) + Column('id', Integer, primary_key=True), + ) t2 = Table('t2', meta, - Column('id', Integer, primary_key=True), - ) + Column('id', Integer, primary_key=True), + ) t12 = Table('tab', meta, - Column('t1_id', Integer, ForeignKey('t1.id',)), - Column('t2_id', Integer, ForeignKey('t2.id',)), - ) + Column('t1_id', Integer, ForeignKey('t1.id',)), + Column('t2_id', Integer, ForeignKey('t2.id',)), + ) - class A(object): pass - class B(object): pass - mapper( B, t2, ) - m = mapper( A, t1, properties=dict( - b_view = relationship( B, secondary=t12, viewonly=True), - b_plain= relationship( B, secondary=t12), - ) + class A(object): + pass + + class B(object): + pass + mapper(B, t2, ) + m = mapper(A, t1, properties=dict( + b_view=relationship(B, secondary=t12, viewonly=True), + b_plain=relationship(B, secondary=t12), + ) ) configure_mappers() assert m.get_property('b_view').local_remote_pairs == \ @@ -2135,31 +2260,32 @@ class ViewOnlyLocalRemoteM2M(fixtures.TestBase): [(t1.c.id, t12.c.t1_id), (t2.c.id, t12.c.t2_id)] - class ViewOnlyNonEquijoin(fixtures.MappedTest): + """'viewonly' mappings based on non-equijoins.""" @classmethod def define_tables(cls, metadata): Table('foos', metadata, - Column('id', Integer, primary_key=True)) + Column('id', Integer, primary_key=True)) Table('bars', metadata, - Column('id', Integer, primary_key=True), - Column('fid', Integer)) + Column('id', Integer, primary_key=True), + Column('fid', Integer)) def test_viewonly_join(self): bars, foos = self.tables.bars, self.tables.foos class Foo(fixtures.ComparableEntity): pass + class 
Bar(fixtures.ComparableEntity): pass mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id > bars.c.fid, - foreign_keys=[bars.c.fid], - viewonly=True)}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id > bars.c.fid, + foreign_keys=[bars.c.fid], + viewonly=True)}) mapper(Bar, bars) @@ -2180,17 +2306,22 @@ class ViewOnlyNonEquijoin(fixtures.MappedTest): class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest): + """'viewonly' mappings that contain the same 'remote' column twice""" @classmethod def define_tables(cls, metadata): Table('foos', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('bid1', Integer,ForeignKey('bars.id')), - Column('bid2', Integer,ForeignKey('bars.id'))) + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('bid1', Integer, ForeignKey('bars.id')), + Column('bid2', Integer, ForeignKey('bars.id'))) Table('bars', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), Column('data', String(50))) def test_relationship_on_or(self): @@ -2198,15 +2329,16 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest): class Foo(fixtures.ComparableEntity): pass + class Bar(fixtures.ComparableEntity): pass mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=sa.or_(bars.c.id == foos.c.bid1, - bars.c.id == foos.c.bid2), - uselist=True, - viewonly=True)}) + 'bars': relationship(Bar, + primaryjoin=sa.or_(bars.c.id == foos.c.bid1, + bars.c.id == foos.c.bid2), + uselist=True, + viewonly=True)}) mapper(Bar, bars) sess = create_session() @@ -2228,18 +2360,20 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest): eq_(sess.query(Foo).filter_by(id=f2.id).one(), Foo(bars=[Bar(data='b3')])) + class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest): + """'viewonly' mappings that contain the same 'local' column twice""" @classmethod def 
define_tables(cls, metadata): Table('foos', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('data', String(50))) Table('bars', metadata, Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), + test_needs_autoincrement=True), Column('fid1', Integer, ForeignKey('foos.id')), Column('fid2', Integer, ForeignKey('foos.id')), Column('data', String(50))) @@ -2249,14 +2383,15 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest): class Foo(fixtures.ComparableEntity): pass + class Bar(fixtures.ComparableEntity): pass mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=sa.or_(bars.c.fid1 == foos.c.id, - bars.c.fid2 == foos.c.id), - viewonly=True)}) + 'bars': relationship(Bar, + primaryjoin=sa.or_(bars.c.fid1 == foos.c.id, + bars.c.fid2 == foos.c.id), + viewonly=True)}) mapper(Bar, bars) sess = create_session() @@ -2279,57 +2414,61 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest): eq_(sess.query(Foo).filter_by(id=f2.id).one(), Foo(bars=[Bar(data='b3'), Bar(data='b4')])) + class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest): + """'viewonly' mappings with a complex join condition.""" @classmethod def define_tables(cls, metadata): Table('t1', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50))) Table('t2', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50)), - Column('t1id', Integer, ForeignKey('t1.id'))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)), + Column('t1id', Integer, ForeignKey('t1.id'))) Table('t3', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50))) + Column('id', Integer, 
primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50))) Table('t2tot3', metadata, - Column('t2id', Integer, ForeignKey('t2.id')), - Column('t3id', Integer, ForeignKey('t3.id'))) + Column('t2id', Integer, ForeignKey('t2.id')), + Column('t3id', Integer, ForeignKey('t3.id'))) @classmethod def setup_classes(cls): class T1(cls.Comparable): pass + class T2(cls.Comparable): pass + class T3(cls.Comparable): pass def test_basic(self): T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1, - self.tables.t2, - self.classes.T2, - self.classes.T3, - self.tables.t3, - self.tables.t2tot3, - self.tables.t1) + self.tables.t2, + self.classes.T2, + self.classes.T3, + self.tables.t3, + self.tables.t2tot3, + self.tables.t1) mapper(T1, t1, properties={ - 't3s':relationship(T3, primaryjoin=sa.and_( - t1.c.id==t2.c.t1id, - t2.c.id==t2tot3.c.t2id, - t3.c.id==t2tot3.c.t3id), - viewonly=True, - foreign_keys=t3.c.id, remote_side=t2.c.t1id) + 't3s': relationship(T3, primaryjoin=sa.and_( + t1.c.id == t2.c.t1id, + t2.c.id == t2tot3.c.t2id, + t3.c.id == t2tot3.c.t3id), + viewonly=True, + foreign_keys=t3.c.id, remote_side=t2.c.t1id) }) mapper(T2, t2, properties={ - 't1':relationship(T1), - 't3s':relationship(T3, secondary=t2tot3) + 't1': relationship(T1), + 't3s': relationship(T3, secondary=t2tot3) }) mapper(T3, t3) @@ -2341,31 +2480,32 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest): a = sess.query(T1).first() eq_(a.t3s, [T3(data='t3')]) - def test_remote_side_escalation(self): T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1, - self.tables.t2, - self.classes.T2, - self.classes.T3, - self.tables.t3, - self.tables.t2tot3, - self.tables.t1) + self.tables.t2, + self.classes.T2, + self.classes.T3, + self.tables.t3, + self.tables.t2tot3, + self.tables.t1) mapper(T1, t1, properties={ - 't3s':relationship(T3, - primaryjoin=sa.and_(t1.c.id==t2.c.t1id, - t2.c.id==t2tot3.c.t2id, - t3.c.id==t2tot3.c.t3id - ), - viewonly=True, - foreign_keys=t3.c.id)}) + 't3s': 
relationship(T3, + primaryjoin=sa.and_(t1.c.id == t2.c.t1id, + t2.c.id == t2tot3.c.t2id, + t3.c.id == t2tot3.c.t3id + ), + viewonly=True, + foreign_keys=t3.c.id)}) mapper(T2, t2, properties={ - 't1':relationship(T1), - 't3s':relationship(T3, secondary=t2tot3)}) + 't1': relationship(T1), + 't3s': relationship(T3, secondary=t2tot3)}) mapper(T3, t3) self._assert_raises_no_local_remote(configure_mappers, "T1.t3s") + class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest): + """test a complex annotation using between(). Using declarative here as an integration test for the local() @@ -2381,23 +2521,23 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest): __tablename__ = "network" id = Column(sa.Integer, primary_key=True, - test_needs_autoincrement=True) + test_needs_autoincrement=True) ip_net_addr = Column(Integer) ip_broadcast_addr = Column(Integer) - addresses = relationship("Address", - primaryjoin="remote(foreign(Address.ip_addr)).between(" - "Network.ip_net_addr," - "Network.ip_broadcast_addr)", - viewonly=True - ) + addresses = relationship( + "Address", + primaryjoin="remote(foreign(Address.ip_addr)).between(" + "Network.ip_net_addr," + "Network.ip_broadcast_addr)", + viewonly=True + ) class Address(fixtures.ComparableEntity, Base): __tablename__ = "address" ip_addr = Column(Integer, primary_key=True) - @classmethod def insert_data(cls): Network, Address = cls.classes.Network, cls.classes.Address @@ -2417,11 +2557,11 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest): session = Session(testing.db) eq_( - session.query(Address.ip_addr).\ - select_from(Network).\ - join(Network.addresses).\ - filter(Network.ip_net_addr == 15).\ - all(), + session.query(Address.ip_addr). + select_from(Network). + join(Network.addresses). + filter(Network.ip_net_addr == 15). 
+ all(), [(17, ), (18, )] ) @@ -2439,59 +2579,61 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): Table('t1', metadata, - Column('id', String(50), primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50))) + Column('id', String(50), primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50))) Table('t2', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', String(50)), - Column('t1id', String(50))) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', String(50)), + Column('t1id', String(50))) @classmethod def setup_classes(cls): class T1(cls.Comparable): pass + class T2(cls.Comparable): pass def test_onetomany_funcfk_oldstyle(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) # old _local_remote_pairs mapper(T1, t1, properties={ - 't2s':relationship(T2, - primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id), - _local_remote_pairs=[(t1.c.id, t2.c.t1id)], - foreign_keys=[t2.c.t1id] - ) - }) + 't2s': relationship( + T2, + primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id), + _local_remote_pairs=[(t1.c.id, t2.c.t1id)], + foreign_keys=[t2.c.t1id] + ) + }) mapper(T2, t2) self._test_onetomany() def test_onetomany_funcfk_annotated(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) # use annotation mapper(T1, t1, properties={ - 't2s':relationship(T2, - primaryjoin=t1.c.id== - foreign(sa.func.lower(t2.c.t1id)), - )}) + 't2s': relationship(T2, + primaryjoin=t1.c.id == + foreign(sa.func.lower(t2.c.t1id)), + )}) mapper(T2, t2) self._test_onetomany() def _test_onetomany(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + 
self.tables.t1) is_(T1.t2s.property.direction, ONETOMANY) eq_(T1.t2s.property.local_remote_pairs, [(t1.c.id, t2.c.t1id)]) sess = create_session() @@ -2511,17 +2653,17 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): def test_manytoone_funcfk(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) mapper(T1, t1) mapper(T2, t2, properties={ - 't1':relationship(T1, - primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id), - _local_remote_pairs=[(t2.c.t1id, t1.c.id)], - foreign_keys=[t2.c.t1id], - uselist=True)}) + 't1': relationship(T1, + primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id), + _local_remote_pairs=[(t2.c.t1id, t1.c.id)], + foreign_keys=[t2.c.t1id], + uselist=True)}) sess = create_session() a1 = T1(id='number1', data='a1') @@ -2539,15 +2681,16 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): def test_onetomany_func_referent(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) mapper(T1, t1, properties={ - 't2s':relationship(T2, - primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id, - _local_remote_pairs=[(t1.c.id, t2.c.t1id)], - foreign_keys=[t2.c.t1id])}) + 't2s': relationship( + T2, + primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id, + _local_remote_pairs=[(t1.c.id, t2.c.t1id)], + foreign_keys=[t2.c.t1id])}) mapper(T2, t2) sess = create_session() @@ -2562,21 +2705,21 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): eq_(sess.query(T1).first(), T1(id='NuMbeR1', data='a1', t2s=[ - T2(data='b1', t1id='number1'), - T2(data='b2', t1id='number1')])) + T2(data='b1', t1id='number1'), + T2(data='b2', t1id='number1')])) def test_manytoone_func_referent(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) mapper(T1, t1) mapper(T2, t2, properties={ - 't1':relationship(T1, - 
primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id, - _local_remote_pairs=[(t2.c.t1id, t1.c.id)], - foreign_keys=[t2.c.t1id], uselist=True)}) + 't1': relationship(T1, + primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id, + _local_remote_pairs=[(t2.c.t1id, t1.c.id)], + foreign_keys=[t2.c.t1id], uselist=True)}) sess = create_session() a1 = T1(id='NuMbeR1', data='a1') @@ -2594,40 +2737,44 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest): def test_escalation_1(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) mapper(T1, t1, properties={ - 't2s':relationship(T2, - primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id), - _local_remote_pairs=[(t1.c.id, t2.c.t1id)], - foreign_keys=[t2.c.t1id], - remote_side=[t2.c.t1id])}) + 't2s': relationship( + T2, + primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id), + _local_remote_pairs=[(t1.c.id, t2.c.t1id)], + foreign_keys=[t2.c.t1id], + remote_side=[t2.c.t1id])}) mapper(T2, t2) assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers) def test_escalation_2(self): T2, T1, t2, t1 = (self.classes.T2, - self.classes.T1, - self.tables.t2, - self.tables.t1) + self.classes.T1, + self.tables.t2, + self.tables.t1) mapper(T1, t1, properties={ - 't2s':relationship(T2, - primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id), - _local_remote_pairs=[(t1.c.id, t2.c.t1id)])}) + 't2s': relationship( + T2, + primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id), + _local_remote_pairs=[(t1.c.id, t2.c.t1id)])}) mapper(T2, t2) assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers) + class InvalidRemoteSideTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('t1', metadata, - Column('id', Integer, primary_key=True), - Column('data', String(50)), - Column('t_id', Integer, ForeignKey('t1.id')) - ) + Column('id', Integer, primary_key=True), + Column('data', String(50)), + Column('t_id', Integer, ForeignKey('t1.id')) + ) @classmethod def 
setup_classes(cls): @@ -2638,10 +2785,11 @@ class InvalidRemoteSideTest(fixtures.MappedTest): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, backref='parent') + 't1s': relationship(T1, backref='parent') }) - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message( + sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " r"both of the same direction symbol\('ONETOMANY'\). Did you " "mean to set remote_side on the many-to-one side ?", @@ -2651,12 +2799,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, - backref=backref('parent', remote_side=t1.c.id), - remote_side=t1.c.id) + 't1s': relationship(T1, + backref=backref('parent', remote_side=t1.c.id), + remote_side=t1.c.id) }) - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message( + sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are " r"both of the same direction symbol\('MANYTOONE'\). Did you " "mean to set remote_side on the many-to-one side ?", @@ -2666,12 +2815,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, back_populates='parent'), - 'parent':relationship(T1, back_populates='t1s'), + 't1s': relationship(T1, back_populates='parent'), + 'parent': relationship(T1, back_populates='t1s'), }) # can't be sure of ordering here - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message( + sa.exc.ArgumentError, r"both of the same direction symbol\('ONETOMANY'\). 
Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) @@ -2680,44 +2830,48 @@ class InvalidRemoteSideTest(fixtures.MappedTest): T1, t1 = self.classes.T1, self.tables.t1 mapper(T1, t1, properties={ - 't1s':relationship(T1, back_populates='parent', + 't1s': relationship(T1, back_populates='parent', remote_side=t1.c.id), - 'parent':relationship(T1, back_populates='t1s', - remote_side=t1.c.id) + 'parent': relationship(T1, back_populates='t1s', + remote_side=t1.c.id) }) # can't be sure of ordering here - assert_raises_message(sa.exc.ArgumentError, + assert_raises_message( + sa.exc.ArgumentError, r"both of the same direction symbol\('MANYTOONE'\). Did you " "mean to set remote_side on the many-to-one side ?", configure_mappers) + class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table("a", metadata, - Column('id', Integer, primary_key=True) - ) + Column('id', Integer, primary_key=True) + ) Table("b", metadata, - Column('id', Integer, primary_key=True), - Column('aid_1', Integer, ForeignKey('a.id')), - Column('aid_2', Integer, ForeignKey('a.id')), - ) + Column('id', Integer, primary_key=True), + Column('aid_1', Integer, ForeignKey('a.id')), + Column('aid_2', Integer, ForeignKey('a.id')), + ) Table("atob", metadata, - Column('aid', Integer), - Column('bid', Integer), - ) + Column('aid', Integer), + Column('bid', Integer), + ) Table("atob_ambiguous", metadata, - Column('aid1', Integer, ForeignKey('a.id')), - Column('bid1', Integer, ForeignKey('b.id')), - Column('aid2', Integer, ForeignKey('a.id')), - Column('bid2', Integer, ForeignKey('b.id')), - ) + Column('aid1', Integer, ForeignKey('a.id')), + Column('bid1', Integer, ForeignKey('b.id')), + Column('aid2', Integer, ForeignKey('a.id')), + Column('bid2', Integer, ForeignKey('b.id')), + ) @classmethod def setup_classes(cls): class A(cls.Basic): pass + class B(cls.Basic): pass @@ -2725,7 +2879,7 @@ class 
AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b = self.tables.a, self.tables.b mapper(A, a, properties={ - 'bs':relationship(B) + 'bs': relationship(B) }) mapper(B, b) self._assert_raises_ambig_join( @@ -2738,12 +2892,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b = self.tables.a, self.tables.b mapper(A, a, properties={ - 'bs':relationship(B, foreign_keys=b.c.aid_1) + 'bs': relationship(B, foreign_keys=b.c.aid_1) }) mapper(B, b) sa.orm.configure_mappers() assert A.bs.property.primaryjoin.compare( - a.c.id==b.c.aid_1 + a.c.id == b.c.aid_1 ) eq_( A.bs.property._calculated_foreign_keys, @@ -2754,12 +2908,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b = self.tables.a, self.tables.b mapper(A, a, properties={ - 'bs':relationship(B, primaryjoin=a.c.id==b.c.aid_1) + 'bs': relationship(B, primaryjoin=a.c.id == b.c.aid_1) }) mapper(B, b) sa.orm.configure_mappers() assert A.bs.property.primaryjoin.compare( - a.c.id==b.c.aid_1 + a.c.id == b.c.aid_1 ) eq_( A.bs.property._calculated_foreign_keys, @@ -2770,12 +2924,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b = self.tables.a, self.tables.b mapper(A, a, properties={ - 'bs':relationship(B, primaryjoin=a.c.id==foreign(b.c.aid_1)) + 'bs': relationship(B, primaryjoin=a.c.id == foreign(b.c.aid_1)) }) mapper(B, b) sa.orm.configure_mappers() assert A.bs.property.primaryjoin.compare( - a.c.id==b.c.aid_1 + a.c.id == b.c.aid_1 ) eq_( A.bs.property._calculated_foreign_keys, @@ -2786,7 +2940,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob mapper(A, a, properties={ - 'bs':relationship(B, secondary=a_to_b) + 'bs': 
relationship(B, secondary=a_to_b) }) mapper(B, b) self._assert_raises_no_join( @@ -2798,7 +2952,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): A, B = self.classes.A, self.classes.B a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous mapper(A, a, properties={ - 'bs':relationship(B, secondary=a_to_b) + 'bs': relationship(B, secondary=a_to_b) }) mapper(B, b) @@ -2808,20 +2962,20 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest): "atob_ambiguous" ) - def test_with_fks_m2m(self): A, B = self.classes.A, self.classes.B a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous mapper(A, a, properties={ - 'bs':relationship(B, secondary=a_to_b, - foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1]) + 'bs': relationship(B, secondary=a_to_b, + foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1]) }) mapper(B, b) sa.orm.configure_mappers() class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, - testing.AssertsExecutionResults): + testing.AssertsExecutionResults): + """test support for a relationship where the 'secondary' table is a compound join(). 
@@ -2835,35 +2989,44 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, @classmethod def define_tables(cls, metadata): - Table('a', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('name', String(30)), - Column('b_id', ForeignKey('b.id')) - ) + Table( + 'a', metadata, + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(30)), + Column('b_id', ForeignKey('b.id')) + ) Table('b', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('name', String(30)), - Column('d_id', ForeignKey('d.id')) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(30)), + Column('d_id', ForeignKey('d.id')) + ) Table('c', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('name', String(30)), - Column('a_id', ForeignKey('a.id')), - Column('d_id', ForeignKey('d.id')) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(30)), + Column('a_id', ForeignKey('a.id')), + Column('d_id', ForeignKey('d.id')) + ) Table('d', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('name', String(30)), - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('name', String(30)), + ) @classmethod def setup_classes(cls): class A(cls.Comparable): pass + class B(cls.Comparable): pass + class C(cls.Comparable): pass + class D(cls.Comparable): pass @@ -2875,21 +3038,22 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, #j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id).alias() mapper(A, a, properties={ "b": relationship(B), - "d": relationship(D, secondary=j, - primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id), - secondaryjoin=d.c.id == b.c.d_id, - #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == 
j.c.c_a_id), - #secondaryjoin=d.c.id == j.c.b_d_id, - uselist=False - ) - }) + "d": relationship( + D, secondary=j, + primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id), + secondaryjoin=d.c.id == b.c.d_id, + #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id), + #secondaryjoin=d.c.id == j.c.b_d_id, + uselist=False + ) + }) mapper(B, b, properties={ - "d": relationship(D) - }) + "d": relationship(D) + }) mapper(C, c, properties={ - "a": relationship(A), - "d": relationship(D) - }) + "a": relationship(A), + "d": relationship(D) + }) mapper(D, d) @classmethod @@ -2931,8 +3095,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, sess.query(A).join(A.d), "SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id " "FROM a JOIN (b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id " - "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id " - "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id", + "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id " + "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id", dialect="postgresql" ) @@ -2944,8 +3108,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, "SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id, " "d_1.id AS d_1_id, d_1.name AS d_1_name FROM a LEFT OUTER JOIN " "(b AS b_1 JOIN d AS d_2 ON b_1.d_id = d_2.id JOIN c AS c_1 " - "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) " - "ON a.b_id = b_1.id AND a.id = c_1.a_id", + "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) " + "ON a.b_id = b_1.id AND a.id = c_1.a_id", dialect="postgresql" ) @@ -2964,14 +3128,15 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, # referring to just the columns wont actually render all those # join conditions. 
self.assert_sql_execution( - testing.db, - go, - CompiledSQL( - "SELECT d.id AS d_id, d.name AS d_name FROM b " - "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id " - "WHERE :param_1 = b.id AND :param_2 = c.a_id AND d.id = b.d_id", - {'param_1': a1.id, 'param_2': a1.id} - ) + testing.db, + go, + CompiledSQL( + "SELECT d.id AS d_id, d.name AS d_name FROM b " + "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id " + "WHERE :param_1 = b.id AND :param_2 = c.a_id " + "AND d.id = b.d_id", + {'param_1': a1.id, 'param_2': a1.id} + ) ) mapping = { @@ -2988,7 +3153,6 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, for a, d in sess.query(A, D).outerjoin(A.d): eq_(self.mapping[a.name], d.name if d is not None else None) - def test_joinedload(self): A, D = self.classes.A, self.classes.D sess = Session() @@ -3005,7 +3169,9 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, d = a.d eq_(self.mapping[a.name], d.name if d is not None else None) -class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest): + +class InvalidRelationshipEscalationTest( + _RelationshipErrors, fixtures.MappedTest): @classmethod def define_tables(cls, metadata): @@ -3017,20 +3183,20 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest Column('fid', Integer)) Table('foos_with_fks', metadata, - Column('id', Integer, primary_key=True), - Column('fid', Integer, ForeignKey('foos_with_fks.id'))) + Column('id', Integer, primary_key=True), + Column('fid', Integer, ForeignKey('foos_with_fks.id'))) Table('bars_with_fks', metadata, - Column('id', Integer, primary_key=True), - Column('fid', Integer, ForeignKey('foos_with_fks.id'))) + Column('id', Integer, primary_key=True), + Column('fid', Integer, ForeignKey('foos_with_fks.id'))) @classmethod def setup_classes(cls): class Foo(cls.Basic): pass + class Bar(cls.Basic): pass - def test_no_join(self): bars, Foo, Bar, foos = (self.tables.bars, self.classes.Foo, @@ -3038,12 
+3204,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar)}) + 'bars': relationship(Bar)}) mapper(Bar, bars) self._assert_raises_no_join(sa.orm.configure_mappers, - "Foo.bars", None - ) + "Foo.bars", None + ) def test_no_join_self_ref(self): bars, Foo, Bar, foos = (self.tables.bars, @@ -3052,7 +3218,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'foos':relationship(Foo)}) + 'foos': relationship(Foo)}) mapper(Bar, bars) self._assert_raises_no_join( @@ -3068,8 +3234,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id>bars.c.fid)}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id > bars.c.fid)}) mapper(Bar, bars) self._assert_raises_no_relevant_fks( @@ -3084,9 +3250,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id>bars.c.fid, - foreign_keys=bars.c.fid)}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id > bars.c.fid, + foreign_keys=bars.c.fid)}) mapper(Bar, bars) self._assert_raises_no_equality( sa.orm.configure_mappers, @@ -3094,25 +3260,27 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest ) def test_no_equated_wo_fks_works_on_relaxed(self): - foos_with_fks, Foo, Bar, bars_with_fks, foos = (self.tables.foos_with_fks, - self.classes.Foo, - self.classes.Bar, - self.tables.bars_with_fks, - self.tables.foos) + foos_with_fks, Foo, Bar, bars_with_fks, foos = ( + self.tables.foos_with_fks, + self.classes.Foo, + self.classes.Bar, + self.tables.bars_with_fks, + self.tables.foos) # very unique - the join between parent/child # has no fks, but there is an fk join between two 
other # tables in the join condition, for those users that try creating # these big-long-string-of-joining-many-tables primaryjoins. - # in this case we don't get eq_pairs, but we hit the "works if viewonly" - # rule. so here we add another clause regarding "try foreign keys". + # in this case we don't get eq_pairs, but we hit the + # "works if viewonly" rule. so here we add another clause regarding + # "try foreign keys". mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=and_( - bars_with_fks.c.fid==foos_with_fks.c.id, - foos_with_fks.c.id==foos.c.id, - ) - )}) + 'bars': relationship(Bar, + primaryjoin=and_( + bars_with_fks.c.fid == foos_with_fks.c.id, + foos_with_fks.c.id == foos.c.id, + ) + )}) mapper(Bar, bars_with_fks) self._assert_raises_no_equality( @@ -3129,9 +3297,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id==bars.c.fid, - foreign_keys=[foos.c.id, bars.c.fid])}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id == bars.c.fid, + foreign_keys=[foos.c.id, bars.c.fid])}) mapper(Bar, bars) self._assert_raises_ambiguous_direction( @@ -3146,12 +3314,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id==bars.c.fid, - foreign_keys=[bars.c.fid], - remote_side=[foos.c.id, bars.c.fid], - viewonly=True - )}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id == bars.c.fid, + foreign_keys=[bars.c.fid], + remote_side=[foos.c.id, bars.c.fid], + viewonly=True + )}) mapper(Bar, bars) self._assert_raises_no_local_remote( @@ -3159,7 +3327,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest "Foo.bars", ) - def test_ambiguous_remoteside_m2o(self): bars, Foo, Bar, foos = (self.tables.bars, self.classes.Foo, @@ -3167,12 +3334,12 @@ class 
InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id==bars.c.fid, - foreign_keys=[foos.c.id], - remote_side=[foos.c.id, bars.c.fid], - viewonly=True - )}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id == bars.c.fid, + foreign_keys=[foos.c.id], + remote_side=[foos.c.id, bars.c.fid], + viewonly=True + )}) mapper(Bar, bars) self._assert_raises_no_local_remote( @@ -3180,7 +3347,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest "Foo.bars", ) - def test_no_equated_self_ref_no_fks(self): bars, Foo, Bar, foos = (self.tables.bars, self.classes.Foo, @@ -3188,14 +3354,14 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id>foos.c.fid)}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id > foos.c.fid)}) mapper(Bar, bars) - self._assert_raises_no_relevant_fks(configure_mappers, - "foos.id > foos.fid", "Foo.foos", "primary" - ) - + self._assert_raises_no_relevant_fks( + configure_mappers, + "foos.id > foos.fid", "Foo.foos", "primary" + ) def test_no_equated_self_ref_no_equality(self): bars, Foo, Bar, foos = (self.tables.bars, @@ -3204,27 +3370,28 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest self.tables.foos) mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id>foos.c.fid, - foreign_keys=[foos.c.fid])}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id > foos.c.fid, + foreign_keys=[foos.c.fid])}) mapper(Bar, bars) self._assert_raises_no_equality(configure_mappers, - "foos.id > foos.fid", "Foo.foos", "primary" - ) + "foos.id > foos.fid", "Foo.foos", "primary" + ) def test_no_equated_viewonly(self): - bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars, - self.classes.Bar, - self.tables.bars_with_fks, - 
self.tables.foos_with_fks, - self.classes.Foo, - self.tables.foos) + bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = ( + self.tables.bars, + self.classes.Bar, + self.tables.bars_with_fks, + self.tables.foos_with_fks, + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id>bars.c.fid, - viewonly=True)}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id > bars.c.fid, + viewonly=True)}) mapper(Bar, bars) self._assert_raises_no_relevant_fks( @@ -3234,24 +3401,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest sa.orm.clear_mappers() mapper(Foo, foos_with_fks, properties={ - 'bars':relationship(Bar, - primaryjoin=foos_with_fks.c.id>bars_with_fks.c.fid, - viewonly=True)}) + 'bars': relationship( + Bar, + primaryjoin=foos_with_fks.c.id > bars_with_fks.c.fid, + viewonly=True)}) mapper(Bar, bars_with_fks) sa.orm.configure_mappers() def test_no_equated_self_ref_viewonly(self): - bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars, - self.classes.Bar, - self.tables.bars_with_fks, - self.tables.foos_with_fks, - self.classes.Foo, - self.tables.foos) + bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = ( + self.tables.bars, + self.classes.Bar, + self.tables.bars_with_fks, + self.tables.foos_with_fks, + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id>foos.c.fid, - viewonly=True)}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id > foos.c.fid, + viewonly=True)}) mapper(Bar, bars) self._assert_raises_no_relevant_fks( @@ -3261,9 +3430,10 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest sa.orm.clear_mappers() mapper(Foo, foos_with_fks, properties={ - 'foos':relationship(Foo, - primaryjoin=foos_with_fks.c.id>foos_with_fks.c.fid, - viewonly=True)}) + 'foos': relationship( + Foo, + primaryjoin=foos_with_fks.c.id > foos_with_fks.c.fid, + 
viewonly=True)}) mapper(Bar, bars_with_fks) sa.orm.configure_mappers() @@ -3271,25 +3441,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest Foo, foos = self.classes.Foo, self.tables.foos mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id>foos.c.fid, - viewonly=True, - foreign_keys=[foos.c.fid])}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id > foos.c.fid, + viewonly=True, + foreign_keys=[foos.c.fid])}) sa.orm.configure_mappers() eq_(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)]) def test_equated(self): - bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars, - self.classes.Bar, - self.tables.bars_with_fks, - self.tables.foos_with_fks, - self.classes.Foo, - self.tables.foos) + bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = ( + self.tables.bars, + self.classes.Bar, + self.tables.bars_with_fks, + self.tables.foos_with_fks, + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - primaryjoin=foos.c.id==bars.c.fid)}) + 'bars': relationship(Bar, + primaryjoin=foos.c.id == bars.c.fid)}) mapper(Bar, bars) self._assert_raises_no_relevant_fks( @@ -3299,8 +3470,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest sa.orm.clear_mappers() mapper(Foo, foos_with_fks, properties={ - 'bars':relationship(Bar, - primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)}) + 'bars': relationship( + Bar, + primaryjoin=foos_with_fks.c.id == bars_with_fks.c.fid)}) mapper(Bar, bars_with_fks) sa.orm.configure_mappers() @@ -3308,24 +3480,23 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest Foo, foos = self.classes.Foo, self.tables.foos mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id==foos.c.fid)}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id == foos.c.fid)}) self._assert_raises_no_relevant_fks( configure_mappers, "foos.id = foos.fid", 
"Foo.foos", "primary" ) - def test_equated_self_ref_wrong_fks(self): bars, Foo, foos = (self.tables.bars, - self.classes.Foo, - self.tables.foos) + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ - 'foos':relationship(Foo, - primaryjoin=foos.c.id==foos.c.fid, - foreign_keys=[bars.c.id])}) + 'foos': relationship(Foo, + primaryjoin=foos.c.id == foos.c.fid, + foreign_keys=[bars.c.id])}) self._assert_raises_no_relevant_fks( configure_mappers, @@ -3333,7 +3504,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest ) -class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedTest): +class InvalidRelationshipEscalationTestM2M( + _RelationshipErrors, fixtures.MappedTest): @classmethod def define_tables(cls, metadata): @@ -3345,9 +3517,9 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT Column('id', Integer, primary_key=True)) Table('foobars_with_fks', metadata, - Column('fid', Integer, ForeignKey('foos.id')), - Column('bid', Integer, ForeignKey('bars.id')) - ) + Column('fid', Integer, ForeignKey('foos.id')), + Column('bid', Integer, ForeignKey('bars.id')) + ) Table('foobars_with_many_columns', metadata, Column('fid', Integer), @@ -3362,15 +3534,16 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT def setup_classes(cls): class Foo(cls.Basic): pass + class Bar(cls.Basic): pass def test_no_join(self): foobars, bars, Foo, Bar, foos = (self.tables.foobars, - self.tables.bars, - self.classes.Foo, - self.classes.Bar, - self.tables.foos) + self.tables.bars, + self.classes.Foo, + self.classes.Bar, + self.tables.foos) mapper(Foo, foos, properties={ 'bars': relationship(Bar, secondary=foobars)}) @@ -3384,15 +3557,15 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT def test_no_secondaryjoin(self): foobars, bars, Foo, Bar, foos = (self.tables.foobars, - self.tables.bars, - self.classes.Foo, - self.classes.Bar, - 
self.tables.foos) + self.tables.bars, + self.classes.Foo, + self.classes.Bar, + self.tables.foos) mapper(Foo, foos, properties={ 'bars': relationship(Bar, - secondary=foobars, - primaryjoin=foos.c.id > foobars.c.fid)}) + secondary=foobars, + primaryjoin=foos.c.id > foobars.c.fid)}) mapper(Bar, bars) self._assert_raises_no_join( @@ -3402,17 +3575,18 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT ) def test_no_fks(self): - foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_many_columns, - self.tables.bars, - self.classes.Bar, - self.tables.foobars, - self.classes.Foo, - self.tables.foos) + foobars_with_many_columns, bars, Bar, foobars, Foo, foos = ( + self.tables.foobars_with_many_columns, + self.tables.bars, + self.classes.Bar, + self.tables.foobars, + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ 'bars': relationship(Bar, secondary=foobars, - primaryjoin=foos.c.id==foobars.c.fid, - secondaryjoin=foobars.c.bid==bars.c.id)}) + primaryjoin=foos.c.id == foobars.c.fid, + secondaryjoin=foobars.c.bid == bars.c.id)}) mapper(Bar, bars) sa.orm.configure_mappers() eq_( @@ -3426,12 +3600,13 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT sa.orm.clear_mappers() mapper(Foo, foos, properties={ - 'bars': relationship(Bar, - secondary=foobars_with_many_columns, - primaryjoin=foos.c.id == - foobars_with_many_columns.c.fid, - secondaryjoin=foobars_with_many_columns.c.bid == - bars.c.id)}) + 'bars': relationship( + Bar, + secondary=foobars_with_many_columns, + primaryjoin=foos.c.id == + foobars_with_many_columns.c.fid, + secondaryjoin=foobars_with_many_columns.c.bid == + bars.c.id)}) mapper(Bar, bars) sa.orm.configure_mappers() eq_( @@ -3445,17 +3620,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT def test_local_col_setup(self): foobars_with_fks, bars, Bar, Foo, foos = ( - self.tables.foobars_with_fks, - 
self.tables.bars, - self.classes.Bar, - self.classes.Foo, - self.tables.foos) + self.tables.foobars_with_fks, + self.tables.bars, + self.classes.Bar, + self.classes.Foo, + self.tables.foos) # ensure m2m backref is set up with correct annotations # [ticket:2578] mapper(Foo, foos, properties={ 'bars': relationship(Bar, secondary=foobars_with_fks, backref="foos") - }) + }) mapper(Bar, bars) sa.orm.configure_mappers() eq_( @@ -3467,65 +3642,66 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT set([bars.c.id]) ) - - def test_bad_primaryjoin(self): - foobars_with_fks, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_fks, - self.tables.bars, - self.classes.Bar, - self.tables.foobars, - self.classes.Foo, - self.tables.foos) + foobars_with_fks, bars, Bar, foobars, Foo, foos = ( + self.tables.foobars_with_fks, + self.tables.bars, + self.classes.Bar, + self.tables.foobars, + self.classes.Foo, + self.tables.foos) mapper(Foo, foos, properties={ 'bars': relationship(Bar, - secondary=foobars, - primaryjoin=foos.c.id > foobars.c.fid, - secondaryjoin=foobars.c.bid<=bars.c.id)}) + secondary=foobars, + primaryjoin=foos.c.id > foobars.c.fid, + secondaryjoin=foobars.c.bid <= bars.c.id)}) mapper(Bar, bars) self._assert_raises_no_equality( - configure_mappers, - 'foos.id > foobars.fid', - "Foo.bars", - "primary") + configure_mappers, + 'foos.id > foobars.fid', + "Foo.bars", + "primary") sa.orm.clear_mappers() mapper(Foo, foos, properties={ - 'bars': relationship(Bar, - secondary=foobars_with_fks, - primaryjoin=foos.c.id > foobars_with_fks.c.fid, - secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)}) + 'bars': relationship( + Bar, + secondary=foobars_with_fks, + primaryjoin=foos.c.id > foobars_with_fks.c.fid, + secondaryjoin=foobars_with_fks.c.bid <= bars.c.id)}) mapper(Bar, bars) self._assert_raises_no_equality( - configure_mappers, - 'foos.id > foobars_with_fks.fid', - "Foo.bars", - "primary") + configure_mappers, + 'foos.id > 
foobars_with_fks.fid', + "Foo.bars", + "primary") sa.orm.clear_mappers() mapper(Foo, foos, properties={ - 'bars': relationship(Bar, - secondary=foobars_with_fks, - primaryjoin=foos.c.id > foobars_with_fks.c.fid, - secondaryjoin=foobars_with_fks.c.bid<=bars.c.id, - viewonly=True)}) + 'bars': relationship( + Bar, + secondary=foobars_with_fks, + primaryjoin=foos.c.id > foobars_with_fks.c.fid, + secondaryjoin=foobars_with_fks.c.bid <= bars.c.id, + viewonly=True)}) mapper(Bar, bars) sa.orm.configure_mappers() def test_bad_secondaryjoin(self): foobars, bars, Foo, Bar, foos = (self.tables.foobars, - self.tables.bars, - self.classes.Foo, - self.classes.Bar, - self.tables.foos) + self.tables.bars, + self.classes.Foo, + self.classes.Bar, + self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - secondary=foobars, - primaryjoin=foos.c.id == foobars.c.fid, - secondaryjoin=foobars.c.bid <= bars.c.id, - foreign_keys=[foobars.c.fid])}) + 'bars': relationship(Bar, + secondary=foobars, + primaryjoin=foos.c.id == foobars.c.fid, + secondaryjoin=foobars.c.bid <= bars.c.id, + foreign_keys=[foobars.c.fid])}) mapper(Bar, bars) self._assert_raises_no_relevant_fks( configure_mappers, @@ -3536,17 +3712,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT def test_no_equated_secondaryjoin(self): foobars, bars, Foo, Bar, foos = (self.tables.foobars, - self.tables.bars, - self.classes.Foo, - self.classes.Bar, - self.tables.foos) + self.tables.bars, + self.classes.Foo, + self.classes.Bar, + self.tables.foos) mapper(Foo, foos, properties={ - 'bars':relationship(Bar, - secondary=foobars, - primaryjoin=foos.c.id == foobars.c.fid, - secondaryjoin=foobars.c.bid <= bars.c.id, - foreign_keys=[foobars.c.fid, foobars.c.bid])}) + 'bars': relationship(Bar, + secondary=foobars, + primaryjoin=foos.c.id == foobars.c.fid, + secondaryjoin=foobars.c.bid <= bars.c.id, + foreign_keys=[foobars.c.fid, foobars.c.bid])}) mapper(Bar, bars) 
self._assert_raises_no_equality( @@ -3556,6 +3732,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT "secondary" ) + class ActiveHistoryFlagTest(_fixtures.FixtureTest): run_inserts = None run_deletes = None @@ -3572,27 +3749,27 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest): setattr(obj, attrname, newvalue) eq_( attributes.get_history(obj, attrname), - ([newvalue,], (), [oldvalue,]) + ([newvalue, ], (), [oldvalue, ]) ) def test_column_property_flag(self): User, users = self.classes.User, self.tables.users mapper(User, users, properties={ - 'name':column_property(users.c.name, - active_history=True) + 'name': column_property(users.c.name, + active_history=True) }) u1 = User(name='jack') self._test_attribute(u1, 'name', 'ed') def test_relationship_property_flag(self): Address, addresses, users, User = (self.classes.Address, - self.tables.addresses, - self.tables.users, - self.classes.User) + self.tables.addresses, + self.tables.users, + self.classes.User) mapper(Address, addresses, properties={ - 'user':relationship(User, active_history=True) + 'user': relationship(User, active_history=True) }) mapper(User, users) u1 = User(name='jack') @@ -3604,27 +3781,30 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest): Order, orders = self.classes.Order, self.tables.orders class MyComposite(object): + def __init__(self, description, isopen): self.description = description self.isopen = isopen + def __composite_values__(self): return [self.description, self.isopen] + def __eq__(self, other): return isinstance(other, MyComposite) and \ other.description == self.description mapper(Order, orders, properties={ - 'composite':composite( - MyComposite, - orders.c.description, - orders.c.isopen, - active_history=True) + 'composite': composite( + MyComposite, + orders.c.description, + orders.c.isopen, + active_history=True) }) o1 = Order(composite=MyComposite('foo', 1)) self._test_attribute(o1, "composite", MyComposite('bar', 1)) - class 
RelationDeprecationTest(fixtures.MappedTest): + """test usage of the old 'relation' function.""" run_inserts = 'once' @@ -3655,34 +3835,32 @@ class RelationDeprecationTest(fixtures.MappedTest): def fixtures(cls): return dict( users_table=( - ('id', 'name'), - (1, 'jack'), - (2, 'ed'), - (3, 'fred'), - (4, 'chuck')), + ('id', 'name'), + (1, 'jack'), + (2, 'ed'), + (3, 'fred'), + (4, 'chuck')), addresses_table=( - ('id', 'user_id', 'email_address', 'purpose', 'bounces'), - (1, 1, 'jack@jack.home', 'Personal', 0), - (2, 1, 'jack@jack.bizz', 'Work', 1), - (3, 2, 'ed@foo.bar', 'Personal', 0), - (4, 3, 'fred@the.fred', 'Personal', 10))) + ('id', 'user_id', 'email_address', 'purpose', 'bounces'), + (1, 1, 'jack@jack.home', 'Personal', 0), + (2, 1, 'jack@jack.bizz', 'Work', 1), + (3, 2, 'ed@foo.bar', 'Personal', 0), + (4, 3, 'fred@the.fred', 'Personal', 10))) def test_relation(self): - addresses_table, User, users_table, Address = (self.tables.addresses_table, - self.classes.User, - self.tables.users_table, - self.classes.Address) + addresses_table, User, users_table, Address = ( + self.tables.addresses_table, + self.classes.User, + self.tables.users_table, + self.classes.Address) mapper(User, users_table, properties=dict( addresses=relation(Address, backref='user'), - )) + )) mapper(Address, addresses_table) session = create_session() - ed = session.query(User).filter(User.addresses.any( + session.query(User).filter(User.addresses.any( Address.email_address == 'ed@foo.bar')).one() - - - -- cgit v1.2.1 From 55cad302cee51aff6d2bcda2f2f963004d54e6de Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 18 Oct 2014 17:56:13 -0400 Subject: - A warning is emitted in the case of multiple relationships that ultimately will populate a foreign key column in conflict with another, where the relationships are attempting to copy values from different source columns. 
This occurs in the case where composite foreign keys with overlapping columns are mapped to relationships that each refer to a different referenced column. A new documentation section illustrates the example as well as how to overcome the issue by specifying "foreign" columns specifically on a per-relationship basis. fixes #3230 --- doc/build/changelog/changelog_10.rst | 18 +++++ doc/build/orm/relationships.rst | 135 +++++++++++++++++++++++++++++++++-- lib/sqlalchemy/orm/relationships.py | 51 +++++++++++++ test/orm/test_assorted_eager.py | 4 +- test/orm/test_joins.py | 39 +++++----- test/orm/test_relationships.py | 80 ++++++++++++++++++++- 6 files changed, 302 insertions(+), 25 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 5aed3bddd..4454dd98a 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,24 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, orm + :tickets: 3230 + + A warning is emitted in the case of multiple relationships that + ultimately will populate a foreign key column in conflict with + another, where the relationships are attempting to copy values + from different source columns. This occurs in the case where + composite foreign keys with overlapping columns are mapped to + relationships that each refer to a different referenced column. + A new documentation section illustrates the example as well as how + to overcome the issue by specifying "foreign" columns specifically + on a per-relationship basis. + + .. seealso:: + + :ref:`relationship_overlapping_foreignkeys` + .. 
change:: :tags: feature, sql :tickets: 3172 diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst index c65f06cbc..f512251a7 100644 --- a/doc/build/orm/relationships.rst +++ b/doc/build/orm/relationships.rst @@ -1079,12 +1079,15 @@ The above relationship will produce a join like:: ON host_entry_1.ip_address = CAST(host_entry.content AS INET) An alternative syntax to the above is to use the :func:`.foreign` and -:func:`.remote` :term:`annotations`, inline within the :paramref:`~.relationship.primaryjoin` expression. +:func:`.remote` :term:`annotations`, +inline within the :paramref:`~.relationship.primaryjoin` expression. This syntax represents the annotations that :func:`.relationship` normally applies by itself to the join condition given the :paramref:`~.relationship.foreign_keys` and -:paramref:`~.relationship.remote_side` arguments; the functions are provided in the API in the -rare case that :func:`.relationship` can't determine the exact location -of these features on its own:: +:paramref:`~.relationship.remote_side` arguments. These functions may +be more succinct when an explicit join condition is present, and additionally +serve to mark exactly the column that is "foreign" or "remote" independent +of whether that column is stated multiple times or within complex +SQL expressions:: from sqlalchemy.orm import foreign, remote @@ -1157,6 +1160,130 @@ Will render as:: flag to assist in the creation of :func:`.relationship` constructs using custom operators. +.. _relationship_overlapping_foreignkeys: + +Overlapping Foreign Keys +~~~~~~~~~~~~~~~~~~~~~~~~ + +A rare scenario can arise when composite foreign keys are used, such that +a single column may be the subject of more than one column +referred to via foreign key constraint. 
+ +Consider an (admittedly complex) mapping such as the ``Magazine`` object, +referred to both by the ``Writer`` object and the ``Article`` object +using a composite primary key scheme that includes ``magazine_id`` +for both; then to make ``Article`` refer to ``Writer`` as well, +``Article.magazine_id`` is involved in two separate relationships; +``Article.magazine`` and ``Article.writer``:: + + class Magazine(Base): + __tablename__ = 'magazine' + + id = Column(Integer, primary_key=True) + + + class Article(Base): + __tablename__ = 'article' + + article_id = Column(Integer) + magazine_id = Column(ForeignKey('magazine.id')) + writer_id = Column() + + magazine = relationship("Magazine") + writer = relationship("Writer") + + __table_args__ = ( + PrimaryKeyConstraint('article_id', 'magazine_id'), + ForeignKeyConstraint( + ['writer_id', 'magazine_id'], + ['writer.id', 'writer.magazine_id'] + ), + ) + + + class Writer(Base): + __tablename__ = 'writer' + + id = Column(Integer, primary_key=True) + magazine_id = Column(ForeignKey('magazine.id'), primary_key=True) + magazine = relationship("Magazine") + +When the above mapping is configured, we will see this warning emitted:: + + SAWarning: relationship 'Article.writer' will copy column + writer.magazine_id to column article.magazine_id, + which conflicts with relationship(s): 'Article.magazine' + (copies magazine.id to article.magazine_id). Consider applying + viewonly=True to read-only relationships, or provide a primaryjoin + condition marking writable columns with the foreign() annotation. + +What this refers to originates from the fact that ``Article.magazine_id`` is +the subject of two different foreign key constraints; it refers to +``Magazine.id`` directly as a source column, but also refers to +``Writer.magazine_id`` as a source column in the context of the +composite key to ``Writer``. 
If we associate an ``Article`` with a +particular ``Magazine``, but then associate the ``Article`` with a +``Writer`` that's associated with a *different* ``Magazine``, the ORM +will overwrite ``Article.magazine_id`` non-deterministically, silently +changing which magazine we refer towards; it may +also attempt to place NULL into this column if we de-associate a +``Writer`` from an ``Article``. The warning lets us know this is the case. + +To solve this, we need to break out the behavior of ``Article`` to include +all three of the following features: + +1. ``Article`` first and foremost writes to + ``Article.magazine_id`` based on data persisted in the ``Article.magazine`` + relationship only, that is a value copied from ``Magazine.id``. + +2. ``Article`` can write to ``Article.writer_id`` on behalf of data + persisted in the ``Article.writer`` relationship, but only the + ``Writer.id`` column; the ``Writer.magazine_id`` column should not + be written into ``Article.magazine_id`` as it ultimately is sourced + from ``Magazine.id``. + +3. ``Article`` takes ``Article.magazine_id`` into account when loading + ``Article.writer``, even though it *doesn't* write to it on behalf + of this relationship. + +To get just #1 and #2, we could specify only ``Article.writer_id`` as the +"foreign keys" for ``Article.writer``:: + + class Article(Base): + # ... + + writer = relationship("Writer", foreign_keys='Article.writer_id') + +However, this has the effect of ``Article.writer`` not taking +``Article.magazine_id`` into account when querying against ``Writer``: + +.. 
sourcecode:: sql + + SELECT article.article_id AS article_article_id, + article.magazine_id AS article_magazine_id, + article.writer_id AS article_writer_id + FROM article + JOIN writer ON writer.id = article.writer_id + +Therefore, to get at all of #1, #2, and #3, we express the join condition +as well as which columns to be written by combining +:paramref:`~.relationship.primaryjoin` fully, along with either the +:paramref:`~.relationship.foreign_keys` argument, or more succinctly by +annotating with :func:`~.orm.foreign`:: + + class Article(Base): + # ... + + writer = relationship( + "Writer", + primaryjoin="and_(Writer.id == foreign(Article.writer_id), " + "Writer.magazine_id == Article.magazine_id)") + +.. versionchanged:: 1.0.0 the ORM will attempt to warn when a column is used + as the synchronization target from more than one relationship + simultaneously. + + Non-relational Comparisons / Materialized Path ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 56a33742d..4a6159144 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -16,6 +16,7 @@ and `secondaryjoin` aspects of :func:`.relationship`. from __future__ import absolute_import from .. import sql, util, exc as sa_exc, schema, log +import weakref from .util import CascadeOptions, _orm_annotate, _orm_deannotate from . import dependency from . 
import attributes @@ -1532,6 +1533,7 @@ class RelationshipProperty(StrategizedProperty): self._check_cascade_settings(self._cascade) self._post_init() self._generate_backref() + self._join_condition._warn_for_conflicting_sync_targets() super(RelationshipProperty, self).do_init() self._lazy_strategy = self._get_strategy((("lazy", "select"),)) @@ -2519,6 +2521,55 @@ class JoinCondition(object): self.secondary_synchronize_pairs = \ self._deannotate_pairs(secondary_sync_pairs) + _track_sync_targets = weakref.WeakKeyDictionary() + + def _warn_for_conflicting_sync_targets(self): + if not self.support_sync: + return + + # totally complex code that takes place for virtually all + # relationships, detecting an incredibly rare edge case, + # and even then, all just to emit a warning. + # we would like to detect if we are synchronizing any column + # pairs in conflict with another relationship that wishes to sync + # an entirely different column to the same target. This is typically + # when using complex overlapping composite foreign keys. + for from_, to_ in [ + (from_, to_) + for (from_, to_) in self.synchronize_pairs + ] + [ + (from_, to_) for + (from_, to_) in self.secondary_synchronize_pairs + ]: + if to_ not in self._track_sync_targets: + self._track_sync_targets[to_] = weakref.WeakKeyDictionary( + {self.prop: from_}) + else: + other_props = [] + prop_to_from = self._track_sync_targets[to_] + for pr, fr_ in prop_to_from.items(): + if pr.mapper in mapperlib._mapper_registry and \ + fr_ is not from_ and \ + pr not in self.prop._reverse_property: + other_props.append((pr, fr_)) + + if other_props: + util.warn( + "relationship '%s' will copy column %s to column %s, " + "which conflicts with relationship(s): %s. " + "Consider applying " + "viewonly=True to read-only relationships, or provide " + "a primaryjoin condition marking writable columns " + "with the foreign() annotation." 
% ( + self.prop, + from_, to_, + ", ".join( + "'%s' (copies %s to %s)" % (pr, fr_, to_) + for (pr, fr_) in other_props) + ) + ) + self._track_sync_targets[to_][self.prop] = from_ + @util.memoized_property def remote_columns(self): return self._gather_join_annotations("remote") diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py index 2bee3cbd6..48faa172f 100644 --- a/test/orm/test_assorted_eager.py +++ b/test/orm/test_assorted_eager.py @@ -82,8 +82,8 @@ class EagerTest(fixtures.MappedTest): mapper(Category, categories) mapper(Option, options, properties=dict( - owner=relationship(Owner), - test=relationship(Thing))) + owner=relationship(Owner, viewonly=True), + test=relationship(Thing, viewonly=True))) mapper(Thing, tests, properties=dict( owner=relationship(Owner, backref='tests'), diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py index 40bc01b5d..eba47dbec 100644 --- a/test/orm/test_joins.py +++ b/test/orm/test_joins.py @@ -361,6 +361,27 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL): ) +class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL): + @classmethod + def setup_mappers(cls): + User = cls.classes.User + Address = cls.classes.Address + users, addresses = (cls.tables.users, cls.tables.addresses) + mapper(User, users, properties={ + 'addresses': relationship(Address), + 'ad_syn': synonym("addresses") + }) + mapper(Address, addresses) + + def test_join_on_synonym(self): + User = self.classes.User + self.assert_compile( + Session().query(User).join(User.ad_syn), + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users JOIN addresses ON users.id = addresses.user_id" + ) + + class JoinTest(QueryTest, AssertsCompiledSQL): __dialect__ = 'default' @@ -409,24 +430,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL): sess.query(literal_column('x'), User).join, Address ) - def test_join_on_synonym(self): - - class User(object): - pass - class Address(object): - pass - users, addresses 
= (self.tables.users, self.tables.addresses) - mapper(User, users, properties={ - 'addresses':relationship(Address), - 'ad_syn':synonym("addresses") - }) - mapper(Address, addresses) - self.assert_compile( - Session().query(User).join(User.ad_syn), - "SELECT users.id AS users_id, users.name AS users_name " - "FROM users JOIN addresses ON users.id = addresses.user_id" - ) - def test_multi_tuple_form(self): """test the 'tuple' form of join, now superseded by the two-element join() form. diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index 4c5a5abee..2a15ce666 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -672,12 +672,89 @@ class CompositeSelfRefFKTest(fixtures.MappedTest): self._test() + def test_overlapping_warning(self): + Employee, Company, employee_t, company_t = (self.classes.Employee, + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) + + mapper(Company, company_t) + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, + primaryjoin=sa.and_( + remote(employee_t.c.emp_id) == employee_t.c.reports_to_id, + remote(employee_t.c.company_id) == employee_t.c.company_id + ), + backref=backref('employees') + ) + }) + + assert_raises_message( + exc.SAWarning, + r"relationship .* will copy column .* to column " + "employee_t.company_id, which conflicts with relationship\(s\)", + configure_mappers + ) + + def test_annotated_no_overwriting(self): + Employee, Company, employee_t, company_t = (self.classes.Employee, + self.classes.Company, + self.tables.employee_t, + self.tables.company_t) + + mapper(Company, company_t) + mapper(Employee, employee_t, properties={ + 'company': relationship(Company, backref='employees'), + 'reports_to': relationship( + Employee, + primaryjoin=sa.and_( + remote(employee_t.c.emp_id) == + foreign(employee_t.c.reports_to_id), + remote(employee_t.c.company_id) == 
employee_t.c.company_id + ), + backref=backref('employees') + ) + }) + + self._test_no_warning() + + def _test_no_overwrite(self, sess, expect_failure): + # test [ticket:3230] + + Employee, Company = self.classes.Employee, self.classes.Company + + c1 = sess.query(Company).filter_by(name='c1').one() + e3 = sess.query(Employee).filter_by(name='emp3').one() + e3.reports_to = None + + if expect_failure: + # if foreign() isn't applied specifically to + # employee_t.c.reports_to_id only, then + # employee_t.c.company_id goes foreign as well and then + # this happens + assert_raises_message( + AssertionError, + "Dependency rule tried to blank-out primary key column " + "'employee_t.company_id'", + sess.flush + ) + else: + sess.flush() + eq_(e3.company, c1) + + @testing.emits_warning("relationship .* will copy column ") def _test(self): + self._test_no_warning(overwrites=True) + + def _test_no_warning(self, overwrites=False): self._test_relationships() sess = Session() self._setup_data(sess) self._test_lazy_relations(sess) self._test_join_aliasing(sess) + self._test_no_overwrite(sess, expect_failure=overwrites) def _test_relationships(self): configure_mappers() @@ -3044,7 +3121,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL, secondaryjoin=d.c.id == b.c.d_id, #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id), #secondaryjoin=d.c.id == j.c.b_d_id, - uselist=False + uselist=False, + viewonly=True ) }) mapper(B, b, properties={ -- cgit v1.2.1 From b88f9e555e0c05b5bc741aa44d7e86b1a6724080 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 19 Oct 2014 06:43:53 -0400 Subject: - for #3230, scale back the check to only look at columns that already have more than one ForeignKeyConstraint referring to them. This limits the check to what we hope is the most common case, but we benefit that the memory and config-time impact is scaled back dramatically. 
--- lib/sqlalchemy/orm/relationships.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 4a6159144..86f1b3f82 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -2521,32 +2521,37 @@ class JoinCondition(object): self.secondary_synchronize_pairs = \ self._deannotate_pairs(secondary_sync_pairs) - _track_sync_targets = weakref.WeakKeyDictionary() + _track_overlapping_sync_targets = weakref.WeakKeyDictionary() def _warn_for_conflicting_sync_targets(self): if not self.support_sync: return - # totally complex code that takes place for virtually all - # relationships, detecting an incredibly rare edge case, - # and even then, all just to emit a warning. # we would like to detect if we are synchronizing any column # pairs in conflict with another relationship that wishes to sync - # an entirely different column to the same target. This is typically - # when using complex overlapping composite foreign keys. + # an entirely different column to the same target. This is a + # very rare edge case so we will try to minimize the memory/overhead + # impact of this check for from_, to_ in [ - (from_, to_) - for (from_, to_) in self.synchronize_pairs + (from_, to_) for (from_, to_) in self.synchronize_pairs ] + [ - (from_, to_) for - (from_, to_) in self.secondary_synchronize_pairs + (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs ]: - if to_ not in self._track_sync_targets: - self._track_sync_targets[to_] = weakref.WeakKeyDictionary( - {self.prop: from_}) + # save ourselves a ton of memory and overhead by only + # considering columns that are subject to a overlapping + # FK constraints at the core level. 
This condition can arise + # if multiple relationships overlap foreign() directly, but + # we're going to assume it's typically a ForeignKeyConstraint- + # level configuration that benefits from this warning. + if len(to_.foreign_keys) < 2: + continue + + if to_ not in self._track_overlapping_sync_targets: + self._track_overlapping_sync_targets[to_] = \ + weakref.WeakKeyDictionary({self.prop: from_}) else: other_props = [] - prop_to_from = self._track_sync_targets[to_] + prop_to_from = self._track_overlapping_sync_targets[to_] for pr, fr_ in prop_to_from.items(): if pr.mapper in mapperlib._mapper_registry and \ fr_ is not from_ and \ @@ -2568,7 +2573,7 @@ class JoinCondition(object): for (pr, fr_) in other_props) ) ) - self._track_sync_targets[to_][self.prop] = from_ + self._track_overlapping_sync_targets[to_][self.prop] = from_ @util.memoized_property def remote_columns(self): -- cgit v1.2.1 From dddb74bbd3892f71c594368af3762808aaf3ed51 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 19 Oct 2014 13:37:48 -0400 Subject: - control the module name of the exception here for py3k compat --- test/engine/test_execute.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index 00b4ba7f3..5c3279ba9 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -25,6 +25,10 @@ from sqlalchemy.util import nested users, metadata, users_autoinc = None, None, None +class SomeException(Exception): + pass + + class ExecuteTest(fixtures.TestBase): __backend__ = True @@ -280,12 +284,13 @@ class ExecuteTest(fixtures.TestBase): impl = Integer def process_bind_param(self, value, dialect): - raise Exception("nope") + raise SomeException("nope") def _go(conn): assert_raises_message( tsa.exc.StatementError, - r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", + r"\(test.engine.test_execute.SomeException\) " + "nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]). 
where( @@ -561,7 +566,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest): if is_transaction: conn = conn.connection conn.execute(self.table.insert().values(a=x, b=value)) - raise Exception("breakage") + raise SomeException("breakage") return go def _assert_no_data(self): @@ -1597,7 +1602,7 @@ class HandleErrorTest(fixtures.TestBase): listener = Mock(return_value=None) event.listen(engine, 'dbapi_error', listener) - nope = Exception("nope") + nope = SomeException("nope") class MyType(TypeDecorator): impl = Integer @@ -1608,7 +1613,8 @@ class HandleErrorTest(fixtures.TestBase): with engine.connect() as conn: assert_raises_message( tsa.exc.StatementError, - r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", + r"\(test.engine.test_execute.SomeException\) " + "nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]).where( column('foo') == literal('bar', MyType())) @@ -1788,7 +1794,7 @@ class HandleErrorTest(fixtures.TestBase): listener = Mock(return_value=None) event.listen(engine, 'handle_error', listener) - nope = Exception("nope") + nope = SomeException("nope") class MyType(TypeDecorator): impl = Integer @@ -1799,7 +1805,8 @@ class HandleErrorTest(fixtures.TestBase): with engine.connect() as conn: assert_raises_message( tsa.exc.StatementError, - r"\(exceptions.Exception\) nope \[SQL\: u?'SELECT 1 ", + r"\(test.engine.test_execute.SomeException\) " + "nope \[SQL\: u?'SELECT 1 ", conn.execute, select([1]).where( column('foo') == literal('bar', MyType())) -- cgit v1.2.1 From 38bc8098419d7b1d4ddb975d85268515f52a3969 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 19 Oct 2014 16:53:45 -0400 Subject: - Fixed bug where :meth:`.Session.expunge` would not fully detach the given object if the object had been subject to a delete operation that was flushed, but not committed. This would also affect related operations like :func:`.make_transient`. 
fixes #3139 --- doc/build/changelog/changelog_10.rst | 13 ++++++++++ doc/build/changelog/migration_10.rst | 33 ++++++++++++++++++++++++ lib/sqlalchemy/orm/session.py | 12 ++++++--- lib/sqlalchemy/orm/state.py | 11 +++++++- test/orm/test_session.py | 50 +++++++++++++++++++++++++++++++++++- 5 files changed, 113 insertions(+), 6 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 4454dd98a..18742f81e 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,19 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, orm + :tickets: 3139 + + Fixed bug where :meth:`.Session.expunge` would not fully detach + the given object if the object had been subject to a delete + operation that was flushed, but not committed. This would also + affect related operations like :func:`.make_transient`. + + .. seealso:: + + :ref:`bug_3139` + .. change:: :tags: bug, orm :tickets: 3230 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 3591ee0e2..c025390d2 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -927,6 +927,39 @@ symbol, and no change to the object's state occurs. :ticket:`3061` +.. _bug_3139: + +session.expunge() will fully detach an object that's been deleted +----------------------------------------------------------------- + +The behavior of :meth:`.Session.expunge` had a bug that caused an +inconsistency in behavior regarding deleted objects. 
The +:func:`.object_session` function as well as the :attr:`.InstanceState.session` +attribute would still report object as belonging to the :class:`.Session` +subsequent to the expunge:: + + u1 = sess.query(User).first() + sess.delete(u1) + + sess.flush() + + assert u1 not in sess + assert inspect(u1).session is sess # this is normal before commit + + sess.expunge(u1) + + assert u1 not in sess + assert inspect(u1).session is None # would fail + +Note that it is normal for ``u1 not in sess`` to be True while +``inspect(u1).session`` still refers to the session, while the transaction +is ongoing subsequent to the delete operation and :meth:`.Session.expunge` +has not been called; the full detachment normally completes once the +transaction is committed. This issue would also impact functions +that rely on :meth:`.Session.expunge` such as :func:`.make_transient`. + +:ticket:`3139` + .. _migration_yield_per_eager_loading: Joined/Subquery eager loading explicitly disallowed with yield_per diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index db9d3a51d..f23983cbc 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -292,7 +292,7 @@ class SessionTransaction(object): for s in self.session.identity_map.all_states(): s._expire(s.dict, self.session.identity_map._modified) for s in self._deleted: - s.session_id = None + s._detach() self._deleted.clear() elif self.nested: self._parent._new.update(self._new) @@ -1409,6 +1409,7 @@ class Session(_SessionClassMethods): state._detach() elif self.transaction: self.transaction._deleted.pop(state, None) + state._detach() def _register_newly_persistent(self, states): for state in states: @@ -2449,16 +2450,19 @@ def make_transient_to_detached(instance): def object_session(instance): - """Return the ``Session`` to which instance belongs. + """Return the :class:`.Session` to which the given instance belongs. - If the instance is not a mapped instance, an error is raised. 
+ This is essentially the same as the :attr:`.InstanceState.session` + accessor. See that attribute for details. """ try: - return _state_session(attributes.instance_state(instance)) + state = attributes.instance_state(instance) except exc.NO_STATE: raise exc.UnmappedInstanceError(instance) + else: + return _state_session(state) _new_sessionid = util.counter() diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 4756f1707..560149de5 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -145,7 +145,16 @@ class InstanceState(interfaces.InspectionAttr): @util.dependencies("sqlalchemy.orm.session") def session(self, sessionlib): """Return the owning :class:`.Session` for this instance, - or ``None`` if none available.""" + or ``None`` if none available. + + Note that the result here can in some cases be *different* + from that of ``obj in session``; an object that's been deleted + will report as not ``in session``, however if the transaction is + still in progress, this attribute will still refer to that session. + Only when the transaction is completed does the object become + fully detached under normal circumstances. 
+ + """ return sessionlib._state_session(self) @property diff --git a/test/orm/test_session.py b/test/orm/test_session.py index b0b00d5ed..96728612d 100644 --- a/test/orm/test_session.py +++ b/test/orm/test_session.py @@ -204,6 +204,7 @@ class SessionUtilTest(_fixtures.FixtureTest): sess.flush() make_transient(u1) sess.rollback() + assert attributes.instance_state(u1).transient def test_make_transient_to_detached(self): users, User = self.tables.users, self.classes.User @@ -661,7 +662,7 @@ class SessionStateTest(_fixtures.FixtureTest): go() eq_(canary, [False]) - def test_deleted_expunged(self): + def test_deleted_auto_expunged(self): users, User = self.tables.users, self.classes.User mapper(User, users) @@ -682,6 +683,53 @@ class SessionStateTest(_fixtures.FixtureTest): assert object_session(u1) is None + def test_explicit_expunge_pending(self): + users, User = self.tables.users, self.classes.User + + mapper(User, users) + sess = Session() + u1 = User(name='x') + sess.add(u1) + + sess.flush() + sess.expunge(u1) + + assert u1 not in sess + assert object_session(u1) is None + + sess.rollback() + + assert u1 not in sess + assert object_session(u1) is None + + def test_explicit_expunge_deleted(self): + users, User = self.tables.users, self.classes.User + + mapper(User, users) + sess = Session() + sess.add(User(name='x')) + sess.commit() + + u1 = sess.query(User).first() + sess.delete(u1) + + sess.flush() + + assert was_deleted(u1) + assert u1 not in sess + assert object_session(u1) is sess + + sess.expunge(u1) + assert was_deleted(u1) + assert u1 not in sess + assert object_session(u1) is None + + sess.rollback() + assert was_deleted(u1) + assert u1 not in sess + assert object_session(u1) is None + + class SessionStateWFixtureTest(_fixtures.FixtureTest): __backend__ = True -- cgit v1.2.1 From ade27f35cb4911306404dcc74cce8bbf6f7d37bb Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 19 Oct 2014 18:26:14 -0400 Subject: - Reversing a change that was made in 0.9, the 
"singleton" nature of the "constants" :func:`.null`, :func:`.true`, and :func:`.false` has been reverted. These functions returning a "singleton" object had the effect that different instances would be treated as the same regardless of lexical use, which in particular would impact the rendering of the columns clause of a SELECT statement. fixes #3170 --- doc/build/changelog/changelog_10.rst | 15 +++++++++++++++ doc/build/changelog/migration_10.rst | 21 +++++++++++++++++++++ lib/sqlalchemy/sql/elements.py | 24 ++++++++++-------------- lib/sqlalchemy/sql/expression.py | 6 +++--- test/sql/test_operators.py | 21 ++++++++++++++++++++- 5 files changed, 69 insertions(+), 18 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 18742f81e..8351b5cce 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,21 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, sql + :tickets: 3170 + + Reversing a change that was made in 0.9, the "singleton" nature + of the "constants" :func:`.null`, :func:`.true`, and :func:`.false` + has been reverted. These functions returning a "singleton" object + had the effect that different instances would be treated as the + same regardless of lexical use, which in particular would impact + the rendering of the columns clause of a SELECT statement. + + .. seealso:: + + :ref:`bug_3170` + .. change:: :tags: bug, orm :tickets: 3139 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index c025390d2..65a8d4431 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -789,6 +789,27 @@ would again fail; these have also been fixed. :ticket:`3148` :ticket:`3188` +.. 
_bug_3170: + +null(), false() and true() constants are no longer singletons +------------------------------------------------------------- + +These three constants were changed to return a "singleton" value +in 0.9; unfortunately, that would lead to a query like the following +to not render as expected:: + + select([null(), null()]) + +rendering only ``SELECT NULL AS anon_1``, because the two :func:`.null` +constructs would come out as the same ``NULL`` object, and +SQLAlchemy's Core model is based on object identity in order to +determine lexical significance. The change in 0.9 had no +importance other than the desire to save on object overhead; in general, +an unnamed construct needs to stay lexically unique so that it gets +labeled uniquely. + +:ticket:`3170` + .. _behavioral_changes_orm_10: Behavioral Changes - ORM diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 444273e67..4d5bb9476 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -1617,10 +1617,10 @@ class Null(ColumnElement): return type_api.NULLTYPE @classmethod - def _singleton(cls): + def _instance(cls): """Return a constant :class:`.Null` construct.""" - return NULL + return Null() def compare(self, other): return isinstance(other, Null) @@ -1641,11 +1641,11 @@ class False_(ColumnElement): return type_api.BOOLEANTYPE def _negate(self): - return TRUE + return True_() @classmethod - def _singleton(cls): - """Return a constant :class:`.False_` construct. + def _instance(cls): + """Return a :class:`.False_` construct. 
E.g.:: @@ -1679,7 +1679,7 @@ class False_(ColumnElement): """ - return FALSE + return False_() def compare(self, other): return isinstance(other, False_) @@ -1700,17 +1700,17 @@ class True_(ColumnElement): return type_api.BOOLEANTYPE def _negate(self): - return FALSE + return False_() @classmethod def _ifnone(cls, other): if other is None: - return cls._singleton() + return cls._instance() else: return other @classmethod - def _singleton(cls): + def _instance(cls): """Return a constant :class:`.True_` construct. E.g.:: @@ -1745,15 +1745,11 @@ class True_(ColumnElement): """ - return TRUE + return True_() def compare(self, other): return isinstance(other, True_) -NULL = Null() -FALSE = False_() -TRUE = True_() - class ClauseList(ClauseElement): """Describe a list of clauses, separated by an operator. diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index 2e10b7370..2ffc5468c 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -89,9 +89,9 @@ asc = public_factory(UnaryExpression._create_asc, ".expression.asc") desc = public_factory(UnaryExpression._create_desc, ".expression.desc") distinct = public_factory( UnaryExpression._create_distinct, ".expression.distinct") -true = public_factory(True_._singleton, ".expression.true") -false = public_factory(False_._singleton, ".expression.false") -null = public_factory(Null._singleton, ".expression.null") +true = public_factory(True_._instance, ".expression.true") +false = public_factory(False_._instance, ".expression.false") +null = public_factory(Null._instance, ".expression.null") join = public_factory(Join._create_join, ".expression.join") outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") insert = public_factory(Insert, ".expression.insert") diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py index 5c401845b..e8ad88511 100644 --- a/test/sql/test_operators.py +++ b/test/sql/test_operators.py @@ -1,4 +1,4 @@ -from 
sqlalchemy.testing import fixtures, eq_, is_ +from sqlalchemy.testing import fixtures, eq_, is_, is_not_ from sqlalchemy import testing from sqlalchemy.testing import assert_raises_message from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false @@ -778,6 +778,25 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL): "SELECT x WHERE NOT NULL" ) + def test_constant_non_singleton(self): + is_not_(null(), null()) + is_not_(false(), false()) + is_not_(true(), true()) + + def test_constant_render_distinct(self): + self.assert_compile( + select([null(), null()]), + "SELECT NULL AS anon_1, NULL AS anon_2" + ) + self.assert_compile( + select([true(), true()]), + "SELECT true AS anon_1, true AS anon_2" + ) + self.assert_compile( + select([false(), false()]), + "SELECT false AS anon_1, false AS anon_2" + ) + class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL): __dialect__ = 'default' -- cgit v1.2.1 From fee776dde052b0ad2595305d23b07e96b035d3f5 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 21 Oct 2014 17:27:37 -0400 Subject: - changelog --- doc/build/changelog/changelog_10.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 8351b5cce..5a2852783 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,16 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, orm + :pullreq: github:137 + + Repaired support of the ``copy.deepcopy()`` call when used by the + :class:`.orm.util.CascadeOptions` argument, which occurs + if ``copy.deepcopy()`` is being used with :func:`.relationship` + (not an officially supported use case). Pull request courtesy + duesenfranz. + .. 
change:: :tags: bug, sql :tickets: 3170 -- cgit v1.2.1 From 56d5732fbdf09508784df6dc4c04e5b39ac6be85 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Tue, 21 Oct 2014 17:58:51 -0400 Subject: - changelog for pullreq github:139 - add support for self-referential foreign keys to move over as well when the table name is changed. --- doc/build/changelog/changelog_10.rst | 12 ++++++++++++ lib/sqlalchemy/sql/schema.py | 26 ++++++++++++++++++++++---- test/sql/test_metadata.py | 36 ++++++++++++++++++++++++++++++++++++ 3 files changed, 70 insertions(+), 4 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 5a2852783..f2b1b9a6c 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,18 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: feature, sql + :pullreq: github:139 + + Added a new parameter :paramref:`.Table.tometadata.name` to + the :meth:`.Table.tometadata` method. Similar to + :paramref:`.Table.tometadata.schema`, this argument causes the newly + copied :class:`.Table` to take on the new name instead of + the existing one. An interesting capability this adds is that of + copying a :class:`.Table` object to the *same* :class:`.MetaData` + target with a new name. Pull request courtesy n.d. parker. + .. change:: :tags: bug, orm :pullreq: github:137 diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 54cf1f529..96cabbf4f 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -786,7 +786,12 @@ class Table(DialectKWArgs, SchemaItem, TableClause): .. versionadded:: 0.9.2 :param name: optional string name indicating the target table name. - If not specified or None, the table name is retained. + If not specified or None, the table name is retained. 
This allows + a :class:`.Table` to be copied to the same :class:`.MetaData` target + with a new name. + + .. versionadded:: 1.0.0 + """ if name is None: name = self.name @@ -1552,7 +1557,7 @@ class ForeignKey(DialectKWArgs, SchemaItem): ) return self._schema_item_copy(fk) - def _get_colspec(self, schema=None): + def _get_colspec(self, schema=None, table_name=None): """Return a string based 'column specification' for this :class:`.ForeignKey`. @@ -1562,7 +1567,15 @@ class ForeignKey(DialectKWArgs, SchemaItem): """ if schema: _schema, tname, colname = self._column_tokens + if table_name is not None: + tname = table_name return "%s.%s.%s" % (schema, tname, colname) + elif table_name: + schema, tname, colname = self._column_tokens + if schema: + return "%s.%s.%s" % (schema, table_name, colname) + else: + return "%s.%s" % (table_name, colname) elif self._table_column is not None: return "%s.%s" % ( self._table_column.table.fullname, self._table_column.key) @@ -2654,10 +2667,15 @@ class ForeignKeyConstraint(Constraint): event.listen(table.metadata, "before_drop", ddl.DropConstraint(self, on=supports_alter)) - def copy(self, schema=None, **kw): + def copy(self, schema=None, target_table=None, **kw): fkc = ForeignKeyConstraint( [x.parent.key for x in self._elements.values()], - [x._get_colspec(schema=schema) + [x._get_colspec( + schema=schema, + table_name=target_table.name + if target_table is not None + and x._table_key() == x.parent.table.key + else None) for x in self._elements.values()], name=self.name, onupdate=self.onupdate, diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index 2328e7f73..3c55242fd 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -722,6 +722,42 @@ class ToMetaDataTest(fixtures.TestBase, ComparesTables): eq_((table.key, table2.key), ('myschema.mytable', 'myschema.newtable')) + def test_change_name_selfref_fk_moves(self): + meta = MetaData() + + referenced = Table('ref', meta, + Column('id', Integer, 
primary_key=True), + ) + table = Table('mytable', meta, + Column('id', Integer, primary_key=True), + Column('parent_id', ForeignKey('mytable.id')), + Column('ref_id', ForeignKey('ref.id')) + ) + + table2 = table.tometadata(table.metadata, name='newtable') + assert table.metadata is table2.metadata + assert table2.c.ref_id.references(referenced.c.id) + assert table2.c.parent_id.references(table2.c.id) + + def test_change_name_selfref_fk_moves_w_schema(self): + meta = MetaData() + + referenced = Table('ref', meta, + Column('id', Integer, primary_key=True), + ) + table = Table('mytable', meta, + Column('id', Integer, primary_key=True), + Column('parent_id', ForeignKey('mytable.id')), + Column('ref_id', ForeignKey('ref.id')) + ) + + table2 = table.tometadata( + table.metadata, name='newtable', schema='newschema') + ref2 = referenced.tometadata(table.metadata, schema='newschema') + assert table.metadata is table2.metadata + assert table2.c.ref_id.references(ref2.c.id) + assert table2.c.parent_id.references(table2.c.id) + def _assert_fk(self, t2, schema, expected, referred_schema_fn=None): m2 = MetaData() existing_schema = t2.schema -- cgit v1.2.1 From 47d316ec665e5d5fc7ac750ba62b189a64d98ddd Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 23 Oct 2014 00:40:29 -0400 Subject: - Fixed bug where the ON clause for :meth:`.Query.join`, and :meth:`.Query.outerjoin` to a single-inheritance subclass using ``of_type()`` would not render the "single table criteria" in the ON clause if the ``from_joinpoint=True`` flag were set. 
fixes #3232 --- doc/build/changelog/changelog_09.rst | 13 +++++++++++++ lib/sqlalchemy/orm/query.py | 7 +++++-- test/orm/inheritance/test_single.py | 25 +++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 2 deletions(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 8687284e8..fe8dc0150 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -10,6 +10,19 @@ .. include:: changelog_07.rst :start-line: 5 +.. changelog:: + :version: 0.9.9 + + .. change:: + :tags: bug, orm + :tickets: 3232 + :versions: 1.0.0 + + Fixed bug where the ON clause for :meth:`.Query.join`, + and :meth:`.Query.outerjoin` to a single-inheritance subclass + using ``of_type()`` would not render the "single table criteria" in + the ON clause if the ``from_joinpoint=True`` flag were set. + .. changelog:: :version: 0.9.8 :released: October 13, 2014 diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index fce7a3665..dc09e8eb4 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1835,6 +1835,11 @@ class Query(object): left_entity = prop = None + if isinstance(onclause, interfaces.PropComparator): + of_type = getattr(onclause, '_of_type', None) + else: + of_type = None + if isinstance(onclause, util.string_types): left_entity = self._joinpoint_zero() @@ -1861,8 +1866,6 @@ class Query(object): if isinstance(onclause, interfaces.PropComparator): if right_entity is None: - right_entity = onclause.property.mapper - of_type = getattr(onclause, '_of_type', None) if of_type: right_entity = of_type else: diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index 6112929b6..967c07150 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -386,6 +386,30 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): ] ) + def test_of_type_aliased_fromjoinpoint(self): + Company, 
Employee, Engineer = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'employee':relationship(Employee) + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') + + sess = create_session() + self.assert_compile( + sess.query(Company).outerjoin( + Company.employee.of_type(Engineer), + aliased=True, from_joinpoint=True), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name FROM companies " + "LEFT OUTER JOIN employees AS employees_1 ON " + "companies.company_id = employees_1.company_id " + "AND employees_1.type IN (:type_1)" + ) + def test_outer_join_prop(self): Company, Employee, Engineer = self.classes.Company,\ self.classes.Employee,\ @@ -549,6 +573,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): "AND employees_1.type IN (:type_1)" ) + def test_relationship_to_subclass(self): JuniorEngineer, Company, companies, Manager, \ Employee, employees, Engineer = (self.classes.JuniorEngineer, -- cgit v1.2.1 From 445b9e2aff4e45a7756a8ca8dfbd51bf359a831b Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 23 Oct 2014 01:54:10 -0400 Subject: - Fixed bug in single table inheritance where a chain of joins that included the same single inh entity more than once (normally this should raise an error) could, in some cases depending on what was being joined "from", implicitly alias the second case of the single inh entity, producing a query that "worked". But as this implicit aliasing is not intended in the case of single table inheritance, it didn't really "work" fully and was very misleading, since it wouldn't always appear. 
fixes #3233 --- doc/build/changelog/changelog_10.rst | 19 ++++++++ doc/build/changelog/migration_10.rst | 86 +++++++++++++++++++++++++++++++++++- lib/sqlalchemy/orm/query.py | 6 +-- test/orm/inheritance/test_single.py | 59 +++++++++++++++++++++++++ 4 files changed, 165 insertions(+), 5 deletions(-) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index f2b1b9a6c..0a6653883 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -153,6 +153,25 @@ :ref:`feature_insert_from_select_defaults` + .. change:: + :tags: bug, orm + :tickets: 3233 + + Fixed bug in single table inheritance where a chain of joins + that included the same single inh entity more than once + (normally this should raise an error) could, in some cases + depending on what was being joined "from", implicitly alias the + second case of the single inh entity, producing + a query that "worked". But as this implicit aliasing is not + intended in the case of single table inheritance, it didn't + really "work" fully and was very misleading, since it wouldn't + always appear. + + .. seealso:: + + :ref:`bug_3233` + + .. change:: :tags: bug, orm :tickets: 3222 diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 65a8d4431..819acbe53 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -8,7 +8,7 @@ What's New in SQLAlchemy 1.0? undergoing maintenance releases as of May, 2014, and SQLAlchemy version 1.0, as of yet unreleased. - Document last updated: September 25, 2014 + Document last updated: October 23, 2014 Introduction ============ @@ -710,6 +710,90 @@ fine if the criteria happens to be rendered twice in the meantime. :ticket:`3222` +.. 
_bug_3233: + +Single inheritance join targets will no longer sometimes implicitly alias themselves +------------------------------------------------------------------------------------ + +This is a bug where an unexpected and inconsistent behavior would occur +in some scenarios when joining to a single-table-inheritance entity. The +difficulty this might cause is that the query is supposed to raise an error, +as it is invalid SQL, however the bug would cause an alias to be added which +makes the query "work". The issue is confusing because this aliasing +is not applied consistently and could change based on the nature of the query +preceding the join. + +A simple example is:: + + from sqlalchemy import Integer, Column, String, ForeignKey + from sqlalchemy.orm import Session, relationship + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class A(Base): + __tablename__ = "a" + + id = Column(Integer, primary_key=True) + type = Column(String) + + __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'a'} + + + class ASub1(A): + __mapper_args__ = {'polymorphic_identity': 'asub1'} + + + class ASub2(A): + __mapper_args__ = {'polymorphic_identity': 'asub2'} + + + class B(Base): + __tablename__ = 'b' + + id = Column(Integer, primary_key=True) + + a_id = Column(Integer, ForeignKey("a.id")) + + a = relationship("A", primaryjoin="B.a_id == A.id", backref='b') + + s = Session() + + print s.query(ASub1).join(B, ASub1.b).join(ASub2, B.a) + + print s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id) + +The two queries at the bottom are equivalent, and should both render +the identical SQL: + + SELECT a.id AS a_id, a.type AS a_type + FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1) + WHERE a.type IN (:type_2) + +The above SQL is invalid, as it renders "a" within the FROM list twice. 
+The bug however would occur with the second query only and render this instead:: + + SELECT a.id AS a_id, a.type AS a_type + FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1 + ON a_1.id = b.a_id AND a_1.type IN (:type_1) + WHERE a_1.type IN (:type_2) + +Where above, the second join to "a" is aliased. While this seems convenient, +it's not how single-inheritance queries work in general and is misleading +and inconsistent. + +The net effect is that applications which were relying on this bug will now +have an error raised by the database. The solution is to use the expected +form. When referring to multiple subclasses of a single-inheritance +entity in a query, you must manually use aliases to disambiguate the table, +as all the subclasses normally refer to the same table:: + + asub2_alias = aliased(ASub2) + + print s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias)) + +:ticket:`3233` + .. _bug_3188: ColumnProperty constructs work a lot better with aliases, order_by diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index dc09e8eb4..f07060825 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1947,11 +1947,9 @@ class Query(object): from_obj, r_info.selectable): overlap = True break - elif sql_util.selectables_overlap(l_info.selectable, - r_info.selectable): - overlap = True - if overlap and l_info.selectable is r_info.selectable: + if (overlap or not create_aliases) and \ + l_info.selectable is r_info.selectable: raise sa_exc.InvalidRequestError( "Can't join table/selectable '%s' to itself" % l_info.selectable) diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index 967c07150..dbbe4c435 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -573,6 +573,65 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): "AND employees_1.type IN (:type_1)" ) + def test_no_aliasing_from_overlap(self): + # 
test [ticket:3233] + + Company, Employee, Engineer, Manager = self.classes.Company,\ + self.classes.Employee,\ + self.classes.Engineer,\ + self.classes.Manager + + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'employees': relationship(Employee, backref="company") + }) + mapper(Employee, employees, polymorphic_on=employees.c.type) + mapper(Engineer, inherits=Employee, polymorphic_identity='engineer') + mapper(Manager, inherits=Employee, polymorphic_identity='manager') + + s = create_session() + + q1 = s.query(Engineer).\ + join(Engineer.company).\ + join(Manager, Company.employees) + + q2 = s.query(Engineer).\ + join(Engineer.company).\ + join(Manager, Company.company_id == Manager.company_id) + + q3 = s.query(Engineer).\ + join(Engineer.company).\ + join(Manager, Company.employees.of_type(Manager)) + + q4 = s.query(Engineer).\ + join(Company, Company.company_id == Engineer.company_id).\ + join(Manager, Company.employees.of_type(Manager)) + + q5 = s.query(Engineer).\ + join(Company, Company.company_id == Engineer.company_id).\ + join(Manager, Company.company_id == Manager.company_id) + + # note that the query is incorrect SQL; we JOIN to + # employees twice. However, this is what's expected so we seek + # to be consistent; previously, aliasing would sneak in due to the + # nature of the "left" side. 
+ for q in [q1, q2, q3, q4, q5]: + self.assert_compile( + q, + "SELECT employees.employee_id AS employees_employee_id, " + "employees.name AS employees_name, " + "employees.manager_data AS employees_manager_data, " + "employees.engineer_info AS employees_engineer_info, " + "employees.type AS employees_type, " + "employees.company_id AS employees_company_id " + "FROM employees JOIN companies " + "ON companies.company_id = employees.company_id " + "JOIN employees " + "ON companies.company_id = employees.company_id " + "AND employees.type IN (:type_1) " + "WHERE employees.type IN (:type_2)" + ) def test_relationship_to_subclass(self): JuniorEngineer, Company, companies, Manager, \ -- cgit v1.2.1 From 3be8da48601c00aae49a0030d5d379d86d7c7c7a Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 23 Oct 2014 02:00:42 -0400 Subject: move this into behavioral changes --- doc/build/changelog/migration_10.rst | 171 ++++++++++++++++++----------------- 1 file changed, 88 insertions(+), 83 deletions(-) diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index 819acbe53..c760ee2d1 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -708,91 +708,11 @@ criteria. Applications that are already adding this criteria to work around this will want to remove its explicit use, though it should continue to work fine if the criteria happens to be rendered twice in the meantime. -:ticket:`3222` - -.. _bug_3233: - -Single inheritance join targets will no longer sometimes implicitly alias themselves ------------------------------------------------------------------------------------- - -This is a bug where an unexpected and inconsistent behavior would occur -in some scenarios when joining to a single-table-inheritance entity. The -difficulty this might cause is that the query is supposed to raise an error, -as it is invalid SQL, however the bug would cause an alias to be added which -makes the query "work". 
The issue is confusing because this aliasing -is not applied consistently and could change based on the nature of the query -preceding the join. - -A simple example is:: - - from sqlalchemy import Integer, Column, String, ForeignKey - from sqlalchemy.orm import Session, relationship - from sqlalchemy.ext.declarative import declarative_base - - Base = declarative_base() - - class A(Base): - __tablename__ = "a" - - id = Column(Integer, primary_key=True) - type = Column(String) - - __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'a'} - - - class ASub1(A): - __mapper_args__ = {'polymorphic_identity': 'asub1'} - - - class ASub2(A): - __mapper_args__ = {'polymorphic_identity': 'asub2'} - - - class B(Base): - __tablename__ = 'b' - - id = Column(Integer, primary_key=True) - - a_id = Column(Integer, ForeignKey("a.id")) - - a = relationship("A", primaryjoin="B.a_id == A.id", backref='b') - - s = Session() - - print s.query(ASub1).join(B, ASub1.b).join(ASub2, B.a) - - print s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id) - -The two queries at the bottom are equivalent, and should both render -the identical SQL: - - SELECT a.id AS a_id, a.type AS a_type - FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1) - WHERE a.type IN (:type_2) - -The above SQL is invalid, as it renders "a" within the FROM list twice. -The bug however would occur with the second query only and render this instead:: - - SELECT a.id AS a_id, a.type AS a_type - FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1 - ON a_1.id = b.a_id AND a_1.type IN (:type_1) - WHERE a_1.type IN (:type_2) - -Where above, the second join to "a" is aliased. While this seems convenient, -it's not how single-inheritance queries work in general and is misleading -and inconsistent. - -The net effect is that applications which were relying on this bug will now -have an error raised by the database. The solution is to use the expected -form. 
When referring to multiple subclasses of a single-inheritance -entity in a query, you must manually use aliases to disambiguate the table, -as all the subclasses normally refer to the same table:: - - asub2_alias = aliased(ASub2) +.. seealso:: - print s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias)) + :ref:`bug_3233` -:ticket:`3233` +:ticket:`3222` .. _bug_3188: @@ -1090,6 +1010,91 @@ joined loader options can still be used:: q = sess.query(Object).options( lazyload('*'), joinedload("some_manytoone")).yield_per(100) +.. _bug_3233: + +Single inheritance join targets will no longer sometimes implicitly alias themselves +------------------------------------------------------------------------------------ + +This is a bug where an unexpected and inconsistent behavior would occur +in some scenarios when joining to a single-table-inheritance entity. The +difficulty this might cause is that the query is supposed to raise an error, +as it is invalid SQL, however the bug would cause an alias to be added which +makes the query "work". The issue is confusing because this aliasing +is not applied consistently and could change based on the nature of the query +preceding the join. 
+ +A simple example is:: + + from sqlalchemy import Integer, Column, String, ForeignKey + from sqlalchemy.orm import Session, relationship + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class A(Base): + __tablename__ = "a" + + id = Column(Integer, primary_key=True) + type = Column(String) + + __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'a'} + + + class ASub1(A): + __mapper_args__ = {'polymorphic_identity': 'asub1'} + + + class ASub2(A): + __mapper_args__ = {'polymorphic_identity': 'asub2'} + + + class B(Base): + __tablename__ = 'b' + + id = Column(Integer, primary_key=True) + + a_id = Column(Integer, ForeignKey("a.id")) + + a = relationship("A", primaryjoin="B.a_id == A.id", backref='b') + + s = Session() + + print s.query(ASub1).join(B, ASub1.b).join(ASub2, B.a) + + print s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id) + +The two queries at the bottom are equivalent, and should both render +the identical SQL: + + SELECT a.id AS a_id, a.type AS a_type + FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1) + WHERE a.type IN (:type_2) + +The above SQL is invalid, as it renders "a" within the FROM list twice. +The bug however would occur with the second query only and render this instead:: + + SELECT a.id AS a_id, a.type AS a_type + FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1 + ON a_1.id = b.a_id AND a_1.type IN (:type_1) + WHERE a_1.type IN (:type_2) + +Where above, the second join to "a" is aliased. While this seems convenient, +it's not how single-inheritance queries work in general and is misleading +and inconsistent. + +The net effect is that applications which were relying on this bug will now +have an error raised by the database. The solution is to use the expected +form. 
When referring to multiple subclasses of a single-inheritance +entity in a query, you must manually use aliases to disambiguate the table, +as all the subclasses normally refer to the same table:: + + asub2_alias = aliased(ASub2) + + print s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias)) + +:ticket:`3233` + + .. _migration_migration_deprecated_orm_events: -- cgit v1.2.1 From 393470c7abf1b1e0c32f037e0ed1eb8e8ce82543 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Thu, 23 Oct 2014 02:10:01 -0400 Subject: typo --- doc/build/changelog/migration_10.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index c760ee2d1..bc7fa139f 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -1064,7 +1064,7 @@ A simple example is:: print s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id) The two queries at the bottom are equivalent, and should both render -the identical SQL: +the identical SQL:: SELECT a.id AS a_id, a.type AS a_type FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1) -- cgit v1.2.1 From 2083ccfbcf7ff1911ce282c8c891a7bf8a86a47c Mon Sep 17 00:00:00 2001 From: jlgoldb2 Date: Sun, 26 Oct 2014 00:01:42 -0700 Subject: changed connnection to connection --- lib/sqlalchemy/events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 1ff35b8b0..b4f057b0a 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -338,7 +338,7 @@ class PoolEvents(event.Events): """ - def reset(self, dbapi_connnection, connection_record): + def reset(self, dbapi_connection, connection_record): """Called before the "reset" action occurs for a pooled connection. 
This event represents -- cgit v1.2.1 From e1d1d999c9a688f4c8dbbe885438c63d6ef494c6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sun, 26 Oct 2014 16:49:44 -0400 Subject: changelog --- doc/build/changelog/changelog_10.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 0a6653883..e63e023d9 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -21,6 +21,15 @@ series as well. For changes that are specific to 1.0 with an emphasis on compatibility concerns, see :doc:`/changelog/migration_10`. + .. change:: + :tags: bug, sql + :pullreq: github:146 + + Fixed the name of the :paramref:`.PoolEvents.reset.dbapi_connection` + parameter as passed to this event; in particular this affects + usage of the "named" argument style for this event. Pull request + courtesy Jason Goldberger. + .. change:: :tags: feature, sql :pullreq: github:139 -- cgit v1.2.1 From d2c1edfb15334a2fb6ada5b064563c144ac22ad7 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 29 Oct 2014 14:55:42 -0400 Subject: - added new backend for pysqlcipher, as we will probably get requests for it soon. --- doc/build/changelog/changelog_09.rst | 12 +++ doc/build/dialects/sqlite.rst | 7 +- lib/sqlalchemy/dialects/sqlite/__init__.py | 2 +- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 116 ++++++++++++++++++++++++++ test/requirements.py | 4 +- 5 files changed, 137 insertions(+), 4 deletions(-) create mode 100644 lib/sqlalchemy/dialects/sqlite/pysqlcipher.py diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index fe8dc0150..6909da357 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,18 @@ .. changelog:: :version: 0.9.9 + .. change:: + :tags: feature, sqlite + :versions: 1.0.0 + + Added a new SQLite backend for the SQLCipher backend. 
This backend + provides for encrypted SQLite databases using the pysqlcipher Python + driver, which is very similar to the pysqlite driver. + + .. seealso:: + + :mod:`~sqlalchemy.dialects.sqlite.pysqlcipher` + .. change:: :tags: bug, orm :tickets: 3232 diff --git a/doc/build/dialects/sqlite.rst b/doc/build/dialects/sqlite.rst index 21fd4e3aa..a18b0ba7b 100644 --- a/doc/build/dialects/sqlite.rst +++ b/doc/build/dialects/sqlite.rst @@ -28,4 +28,9 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect:: Pysqlite -------- -.. automodule:: sqlalchemy.dialects.sqlite.pysqlite \ No newline at end of file +.. automodule:: sqlalchemy.dialects.sqlite.pysqlite + +Pysqlcipher +----------- + +.. automodule:: sqlalchemy.dialects.sqlite.pysqlcipher \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 0eceaa537..a53d53e9d 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -5,7 +5,7 @@ # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.dialects.sqlite import base, pysqlite +from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher # default dialect base.dialect = pysqlite.dialect diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py new file mode 100644 index 000000000..3c55a1de7 --- /dev/null +++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py @@ -0,0 +1,116 @@ +# sqlite/pysqlcipher.py +# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" +.. 
dialect:: sqlite+pysqlcipher + :name: pysqlcipher + :dbapi: pysqlcipher + :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=] + :url: https://pypi.python.org/pypi/pysqlcipher + + ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make + use of the `SQLCipher `_ backend. + + .. versionadded:: 0.9.9 + +Driver +------ + +The driver here is the `pysqlcipher `_ +driver, which makes use of the SQLCipher engine. This system essentially +introduces new PRAGMA commands to SQLite which allows the setting of a +passphrase and other encryption parameters, allowing the database +file to be encrypted. + +Connect Strings +--------------- + +The format of the connect string is in every way the same as that +of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the +"password" field is now accepted, which should contain a passphrase:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db') + +For an absolute file path, two leading slashes should be used for the +database name:: + + e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db') + +A selection of additional encryption-related pragmas supported by SQLCipher +as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed +in the query string, and will result in that PRAGMA being called for each +new connection. Currently, ``cipher``, ``kdf_iter`` +``cipher_page_size`` and ``cipher_use_hmac`` are supported:: + + e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000') + + +Pooling Behavior +---------------- + +The driver makes a change to the default pool behavior of pysqlite +as described in :ref:`pysqlite_threading_pooling`. 
The pysqlcipher driver +has been observed to be significantly slower on connection than the +pysqlite driver, most likely due to the encryption overhead, so the +dialect here defaults to using the :class:`.SingletonThreadPool` +implementation, +instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool +implementation is entirely configurable using the +:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may +be more feasible for single-threaded use, or :class:`.NullPool` may be used +to prevent unencrypted connections from being held open for long periods of +time, at the expense of slower startup time for new connections. + + +""" +from __future__ import absolute_import +from .pysqlite import SQLiteDialect_pysqlite +from ...engine import url as _url +from ... import pool + + +class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite): + driver = 'pysqlcipher' + + pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac') + + @classmethod + def dbapi(cls): + from pysqlcipher import dbapi2 as sqlcipher + return sqlcipher + + @classmethod + def get_pool_class(cls, url): + return pool.SingletonThreadPool + + def connect(self, *cargs, **cparams): + passphrase = cparams.pop('passphrase', '') + + pragmas = dict( + (key, cparams.pop(key, None)) for key in + self.pragmas + ) + + conn = super(SQLiteDialect_pysqlcipher, self).\ + connect(*cargs, **cparams) + conn.execute('pragma key="%s"' % passphrase) + for prag, value in pragmas.items(): + if value is not None: + conn.execute('pragma %s=%s' % (prag, value)) + + return conn + + def create_connect_args(self, url): + super_url = _url.URL( + url.drivername, username=url.username, + host=url.host, database=url.database, query=url.query) + c_args, opts = super(SQLiteDialect_pysqlcipher, self).\ + create_connect_args(super_url) + opts['passphrase'] = url.password + return c_args, opts + +dialect = SQLiteDialect_pysqlcipher diff --git a/test/requirements.py b/test/requirements.py 
index 0a695b641..05ca8d717 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -449,7 +449,7 @@ class DefaultRequirements(SuiteRequirements): after an insert() construct executes. """ return fails_on_everything_except('mysql', - 'sqlite+pysqlite', + 'sqlite+pysqlite', 'sqlite+pysqlcipher', 'sybase', 'mssql') @property @@ -466,7 +466,7 @@ class DefaultRequirements(SuiteRequirements): """ return skip_if('mssql+pymssql', 'crashes on pymssql') + \ fails_on_everything_except('mysql', - 'sqlite+pysqlite') + 'sqlite+pysqlite', 'sqlite+pysqlcipher') @property def sane_multi_rowcount(self): -- cgit v1.2.1 From 7bf5ac9c1e814c999d4930941935e1d5cfd236bf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Fri, 31 Oct 2014 20:00:42 -0400 Subject: - ensure kwargs are passed for limit clause on a compound select as well, further fixes for #3034 --- lib/sqlalchemy/sql/compiler.py | 2 +- test/sql/test_compiler.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index a6c30b7dc..5fa78ad0f 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -813,7 +813,7 @@ class SQLCompiler(Compiled): text += self.order_by_clause(cs, **kwargs) text += (cs._limit_clause is not None or cs._offset_clause is not None) and \ - self.limit_clause(cs) or "" + self.limit_clause(cs, **kwargs) or "" if self.ctes and \ compound_index == 0 and toplevel: diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 3e6b87351..bfafed599 100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -238,6 +238,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): checkparams=params ) + def test_limit_offset_select_literal_binds(self): + stmt = select([1]).limit(5).offset(6) + self.assert_compile( + stmt, + "SELECT 1 LIMIT 5 OFFSET 6", + literal_binds=True + ) + + def test_limit_offset_compound_select_literal_binds(self): + stmt = 
select([1]).union(select([2])).limit(5).offset(6) + self.assert_compile( + stmt, + "SELECT 1 UNION SELECT 2 LIMIT 5 OFFSET 6", + literal_binds=True + ) + def test_select_precol_compile_ordering(self): s1 = select([column('x')]).select_from(text('a')).limit(5).as_scalar() s2 = select([s1]).limit(10) -- cgit v1.2.1 From edec583b459e955a30d40b5c5d8baaed0a2ec1c6 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 5 Nov 2014 04:22:30 -0500 Subject: - Fixed bug regarding expression mutations which could express itself as a "Could not locate column" error when using :class:`.Query` to select from multiple, anonymous column entities when querying against SQLite, as a side effect of the "join rewriting" feature used by the SQLite dialect. fixes #3241 --- doc/build/changelog/changelog_09.rst | 11 +++++++++++ lib/sqlalchemy/sql/elements.py | 7 +++++++ test/sql/test_generative.py | 13 +++++++++++++ test/sql/test_join_rewriting.py | 25 ++++++++++++++++++++++++- 4 files changed, 55 insertions(+), 1 deletion(-) diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index 6909da357..8ed2ea776 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -13,6 +13,17 @@ .. changelog:: :version: 0.9.9 + .. change:: + :tags: bug, orm, sqlite + :versions: 1.0.0 + :tickets: 3241 + + Fixed bug regarding expression mutations which could express + itself as a "Could not locate column" error when using + :class:`.Query` to select from multiple, anonymous column + entities when querying against SQLite, as a side effect of the + "join rewriting" feature used by the SQLite dialect. + .. 
change:: :tags: feature, sqlite :versions: 1.0.0 diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index 4d5bb9476..fa9b66024 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -861,6 +861,9 @@ class ColumnElement(operators.ColumnOperators, ClauseElement): expressions and function calls. """ + while self._is_clone_of is not None: + self = self._is_clone_of + return _anonymous_label( '%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon')) ) @@ -2778,6 +2781,10 @@ class Grouping(ColumnElement): def self_group(self, against=None): return self + @property + def _key_label(self): + return self._label + @property def _label(self): return getattr(self.element, '_label', None) or self.anon_label diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py index 6044cecb0..6b86614e6 100644 --- a/test/sql/test_generative.py +++ b/test/sql/test_generative.py @@ -132,6 +132,19 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults): assert struct == s2 assert struct.is_other(s2) + def test_clone_anon_label(self): + from sqlalchemy.sql.elements import Grouping + c1 = Grouping(literal_column('q')) + s1 = select([c1]) + + class Vis(CloningVisitor): + def visit_grouping(self, elem): + pass + + vis = Vis() + s2 = vis.traverse(s1) + eq_(list(s2.inner_columns)[0].anon_label, c1.anon_label) + def test_change_in_place(self): struct = B(A("expr1"), A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3")) diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py index c8b24e2f2..ced65d7f1 100644 --- a/test/sql/test_join_rewriting.py +++ b/test/sql/test_join_rewriting.py @@ -251,6 +251,16 @@ class _JoinRewriteTestBase(AssertsCompiledSQL): self._f_b1a_where_in_b2a ) + def test_anon_scalar_subqueries(self): + s1 = select([1]).as_scalar() + s2 = select([2]).as_scalar() + + s = select([s1, s2]).apply_labels() + self._test( + s, + self._anon_scalar_subqueries + ) + class 
JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase): @@ -389,6 +399,10 @@ class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase): "FROM a JOIN b2 ON a.id = b2.a_id)" ) + _anon_scalar_subqueries = ( + "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2" + ) + class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase): @@ -497,6 +511,10 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase): "FROM a JOIN b2 ON a.id = b2.a_id)" ) + _anon_scalar_subqueries = ( + "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2" + ) + class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase): @@ -605,6 +623,10 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase): "FROM a JOIN b2 ON a.id = b2.a_id)" ) + _anon_scalar_subqueries = ( + "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2" + ) + class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase): @@ -615,7 +637,8 @@ class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase): _a_bc = _a_bc_comma_a1_selbc = _a__b_dc = _a_bkeyassoc = \ _a_bkeyassoc_aliased = _a_atobalias_balias_c_w_exists = \ _a_atobalias_balias = _b_ab1_union_c_ab2 = \ - _b_a_id_double_overlap_annotated = _f_b1a_where_in_b2a = None + _b_a_id_double_overlap_annotated = _f_b1a_where_in_b2a = \ + _anon_scalar_subqueries = None @classmethod def setup_class(cls): -- cgit v1.2.1 From ea637cef2d9ec54b14fac3620b1cfd47da723f3f Mon Sep 17 00:00:00 2001 From: Paulo Bu Date: Wed, 5 Nov 2014 13:15:08 +0100 Subject: Small improvement on FlushError can't delete error message Output in the error message the table name and the column name. 
--- lib/sqlalchemy/orm/persistence.py | 4 ++-- test/orm/test_unitofwork.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 114b79ea5..28254cc10 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -441,9 +441,9 @@ def _collect_delete_commands(base_mapper, uowtransaction, table, state, state_dict, col) if value is None: raise orm_exc.FlushError( - "Can't delete from table " + "Can't delete from table %s " "using NULL for primary " - "key value") + "key value on column %s" % (table, col)) if update_version_id is not None and \ table.c.contains_column(mapper.version_id_col): diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index a54097b03..247c5e7a8 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -2505,7 +2505,8 @@ class PartialNullPKTest(fixtures.MappedTest): s.delete(t1) assert_raises_message( orm_exc.FlushError, - "Can't delete from table using NULL for primary key value", + "Can't delete from table t1 using NULL " + "for primary key value on column t1.col2", s.commit ) -- cgit v1.2.1 From 8200c2cd35b3e85a636baabe8324b9ecbbd8fedf Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Wed, 5 Nov 2014 15:11:13 -0500 Subject: - edits to the subqueryload ordering merge --- doc/build/faq.rst | 108 ++++++++++++++++++++++++++------------------- doc/build/orm/loading.rst | 25 ++++++++--- doc/build/orm/tutorial.rst | 9 ++-- 3 files changed, 86 insertions(+), 56 deletions(-) diff --git a/doc/build/faq.rst b/doc/build/faq.rst index fa10ba44b..12d8e0acc 100644 --- a/doc/build/faq.rst +++ b/doc/build/faq.rst @@ -603,62 +603,78 @@ The same idea applies to all the other arguments, such as ``foreign_keys``:: foo = relationship(Dest, foreign_keys=[foo_id, bar_id]) -.. _faq_subqueryload_sort: - -Why must I always ``ORDER BY`` a unique column when using ``subqueryload``? 
----------------------------------------------------------------------------- - -The SQL standard prescribes that RDBMSs are free to return rows in any order it -deems appropriate, if no ``ORDER BY`` clause is specified. This even extends to -the case where the ``ORDER BY`` clause is not unique across all rows, i.e. rows -with the same value in the ``ORDER BY`` column(s) will not necessarily be -returned in a deterministic order. - -SQLAlchemy implements :func:`.orm.subqueryload` by issuing a separate query -(where the table specified in the relationship is joined to the original query) -and then attempting to match up the results in Python. This works fine -normally: +.. _faq_subqueryload_limit_sort: + +Why is ``ORDER BY`` required with ``LIMIT`` (especially with ``subqueryload()``)? +--------------------------------------------------------------------------------- + +A relational database can return rows in any +arbitrary order, when an explicit ordering is not set. +While this ordering very often corresponds to the natural +order of rows within a table, this is not the case for all databases and +all queries. The consequence of this is that any query that limits rows +using ``LIMIT`` or ``OFFSET`` should **always** specify an ``ORDER BY``. +Otherwise, it is not deterministic which rows will actually be returned. + +When we use a SQLAlchemy method like :meth:`.Query.first`, we are in fact +applying a ``LIMIT`` of one to the query, so without an explicit ordering +it is not deterministic what row we actually get back. +While we may not notice this for simple queries on databases that usually +returns rows in their natural +order, it becomes much more of an issue if we also use :func:`.orm.subqueryload` +to load related collections, and we may not be loading the collections +as intended. + +SQLAlchemy implements :func:`.orm.subqueryload` by issuing a separate query, +the results of which are matched up to the results from the first query. 
+We see two queries emitted like this: .. sourcecode:: python+sql >>> session.query(User).options(subqueryload(User.addresses)).all() - {opensql}# the "main" query + {opensql}-- the "main" query SELECT users.id AS users_id FROM users {stop} - {opensql}# the "load" query issued by subqueryload - SELECT addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id - FROM (SELECT users.id AS users_id - FROM users) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id - -Notice how the main query is a subquery in the load query. When an -``OFFSET``/``LIMIT`` is involved, however, things get a bit tricky: + {opensql}-- the "load" query issued by subqueryload + SELECT addresses.id AS addresses_id, + addresses.user_id AS addresses_user_id, + anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id FROM users) AS anon_1 + JOIN addresses ON anon_1.users_id = addresses.user_id + ORDER BY anon_1.users_id + +The second query embeds the first query as a source of rows. +When the inner query uses ``OFFSET`` and/or ``LIMIT`` without ordering, +the two queries may not see the same results: .. sourcecode:: python+sql >>> user = session.query(User).options(subqueryload(User.addresses)).first() - {opensql}# the "main" query + {opensql}-- the "main" query SELECT users.id AS users_id FROM users LIMIT 1 {stop} - {opensql}# the "load" query issued by subqueryload - SELECT addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id - FROM (SELECT users.id AS users_id - FROM users - LIMIT 1) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id ORDER BY anon_1.users_id - -The main query is still a subquery in the load query, but *it may return a -different set of results in the second query from the first* because it does -not have a deterministic sort order! 
Depending on database internals, there is -a chance we may get the following resultset for the two queries:: - + {opensql}-- the "load" query issued by subqueryload + SELECT addresses.id AS addresses_id, + addresses.user_id AS addresses_user_id, + anon_1.users_id AS anon_1_users_id + FROM (SELECT users.id AS users_id FROM users LIMIT 1) AS anon_1 + JOIN addresses ON anon_1.users_id = addresses.user_id + ORDER BY anon_1.users_id + +Depending on database specifics, there is +a chance we may get the a result like the following for the two queries:: + + -- query #1 +--------+ |users_id| +--------+ | 1| +--------+ + -- query #2 +------------+-----------------+---------------+ |addresses_id|addresses_user_id|anon_1_users_id| +------------+-----------------+---------------+ @@ -667,26 +683,28 @@ a chance we may get the following resultset for the two queries:: | 4| 2| 2| +------------+-----------------+---------------+ -From SQLAlchemy's point of view, it didn't get any addresses back for user 1, -so ``user.addresses`` is empty. Oops. +Above, we receive two ``addresses`` rows for ``user.id`` of 2, and none for +1. We've wasted two rows and failed to actually load the collection. This +is an insidious error because without looking at the SQL and the results, the +ORM will not show that there's any issue; if we access the ``addresses`` +for the ``User`` we have, it will emit a lazy load for the collection and we +won't see that anything actually went wrong. The solution to this problem is to always specify a deterministic sort order, so that the main query always returns the same set of rows. This generally -means that you should :meth:`.Query.order_by` on a unique column on the table, -usually the primary key:: +means that you should :meth:`.Query.order_by` on a unique column on the table. 
+The primary key is a good choice for this:: session.query(User).options(subqueryload(User.addresses)).order_by(User.id).first() -You can get away with not doing a sort if the ``OFFSET``/``LIMIT`` does not -throw away any rows at all, but it's much simpler to remember to always ``ORDER -BY`` the primary key:: - - session.query(User).options(subqueryload(User.addresses)).filter(User.id == 1).first() - Note that :func:`.joinedload` does not suffer from the same problem because only one query is ever issued, so the load query cannot be different from the main query. +.. seealso:: + + :ref:`subqueryload_ordering` + Performance =========== diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst index 27846b9b2..b2d8124e2 100644 --- a/doc/build/orm/loading.rst +++ b/doc/build/orm/loading.rst @@ -120,21 +120,32 @@ query options: # set children to load eagerly with a second statement session.query(Parent).options(subqueryload('children')).all() -.. _subquery_loading_tips: +.. _subqueryload_ordering: -Subquery Loading Tips -^^^^^^^^^^^^^^^^^^^^^ +The Importance of Ordering +-------------------------- + +A query which makes use of :func:`.subqueryload` in conjunction with a +limiting modifier such as :meth:`.Query.first`, :meth:`.Query.limit`, +or :meth:`.Query.offset` should **always** include :meth:`.Query.order_by` +against unique column(s) such as the primary key, so that the additional queries +emitted by :func:`.subqueryload` include +the same ordering as used by the parent query. 
Without it, there is a chance +that the inner query could return the wrong rows:: -If you have ``LIMIT`` or ``OFFSET`` in your query, you **must** ``ORDER BY`` a -unique column, generally the primary key of your table, in order to ensure -correct results (see :ref:`faq_subqueryload_sort`):: + # incorrect, no ORDER BY + session.query(User).options(subqueryload(User.addresses)).first() - # incorrect + # incorrect if User.name is not unique session.query(User).options(subqueryload(User.addresses)).order_by(User.name).first() # correct session.query(User).options(subqueryload(User.addresses)).order_by(User.name, User.id).first() +.. seealso:: + + :ref:`faq_subqueryload_limit_sort` - detailed example + Loading Along Paths ------------------- diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst index 19f3f6fea..8871ce765 100644 --- a/doc/build/orm/tutorial.rst +++ b/doc/build/orm/tutorial.rst @@ -1631,11 +1631,12 @@ very easy to use: >>> jack.addresses [, ] -.. warning:: +.. note:: - If you use :func:`.subqueryload`, you should generally - :meth:`.Query.order_by` on a unique column in order to ensure correct - results. See :ref:`subquery_loading_tips`. + :func:`.subqueryload` when used in conjunction with limiting such as + :meth:`.Query.first`, :meth:`.Query.limit` or :meth:`.Query.offset` + should also include :meth:`.Query.order_by` on a unique column in order to + ensure correct results. See :ref:`subqueryload_ordering`. Joined Load ------------- -- cgit v1.2.1