summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2010-12-30 14:22:43 -0500
committerMike Bayer <mike_mp@zzzcomputing.com>2010-12-30 14:22:43 -0500
commit426c4356eba28f8bb25b7685e43e49e2ed1131e6 (patch)
treecbb0ac64aa40905e96005393636a153217d724ba /lib/sqlalchemy
parentecf1571ba79a81567428d345a4ec10255305de97 (diff)
downloadsqlalchemy-426c4356eba28f8bb25b7685e43e49e2ed1131e6.tar.gz
- removes the "on_" prefix.
Diffstat (limited to 'lib/sqlalchemy')
-rw-r--r--lib/sqlalchemy/engine/base.py30
-rw-r--r--lib/sqlalchemy/engine/ddl.py16
-rw-r--r--lib/sqlalchemy/engine/strategies.py6
-rw-r--r--lib/sqlalchemy/events.py72
-rw-r--r--lib/sqlalchemy/ext/mutable.py34
-rw-r--r--lib/sqlalchemy/interfaces.py32
-rw-r--r--lib/sqlalchemy/orm/attributes.py22
-rw-r--r--lib/sqlalchemy/orm/collections.py8
-rw-r--r--lib/sqlalchemy/orm/deprecated_interfaces.py34
-rw-r--r--lib/sqlalchemy/orm/descriptor_props.py14
-rw-r--r--lib/sqlalchemy/orm/dynamic.py4
-rw-r--r--lib/sqlalchemy/orm/events.py102
-rw-r--r--lib/sqlalchemy/orm/instrumentation.py8
-rw-r--r--lib/sqlalchemy/orm/mapper.py36
-rw-r--r--lib/sqlalchemy/orm/query.py6
-rw-r--r--lib/sqlalchemy/orm/session.py22
-rw-r--r--lib/sqlalchemy/orm/state.py10
-rw-r--r--lib/sqlalchemy/orm/strategies.py4
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py6
-rw-r--r--lib/sqlalchemy/orm/util.py4
-rw-r--r--lib/sqlalchemy/pool.py20
-rw-r--r--lib/sqlalchemy/schema.py18
22 files changed, 254 insertions, 254 deletions
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index c1f9905b6..e39e2b175 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -507,7 +507,7 @@ class Dialect(object):
raise NotImplementedError()
- def on_connect(self):
+ def connect(self):
"""return a callable which sets up a newly created DBAPI connection.
The callable accepts a single argument "conn" which is the
@@ -1967,14 +1967,14 @@ def _listener_connection_cls(cls, dispatch):
"""
class EventListenerConnection(cls):
def execute(self, clauseelement, *multiparams, **params):
- for fn in dispatch.on_before_execute:
+ for fn in dispatch.before_execute:
clauseelement, multiparams, params = \
fn(self, clauseelement, multiparams, params)
ret = super(EventListenerConnection, self).\
execute(clauseelement, *multiparams, **params)
- for fn in dispatch.on_after_execute:
+ for fn in dispatch.after_execute:
fn(self, clauseelement, multiparams, params, ret)
return ret
@@ -1987,7 +1987,7 @@ def _listener_connection_cls(cls, dispatch):
def _before_cursor_execute(self, context, cursor,
statement, parameters):
- for fn in dispatch.on_before_cursor_execute:
+ for fn in dispatch.before_cursor_execute:
statement, parameters = \
fn(self, cursor, statement, parameters,
context, context.executemany)
@@ -1995,59 +1995,59 @@ def _listener_connection_cls(cls, dispatch):
def _after_cursor_execute(self, context, cursor,
statement, parameters):
- dispatch.on_after_cursor_execute(self, cursor,
+ dispatch.after_cursor_execute(self, cursor,
statement,
parameters,
context,
context.executemany)
def _begin_impl(self):
- dispatch.on_begin(self)
+ dispatch.begin(self)
return super(EventListenerConnection, self).\
_begin_impl()
def _rollback_impl(self):
- dispatch.on_rollback(self)
+ dispatch.rollback(self)
return super(EventListenerConnection, self).\
_rollback_impl()
def _commit_impl(self):
- dispatch.on_commit(self)
+ dispatch.commit(self)
return super(EventListenerConnection, self).\
_commit_impl()
def _savepoint_impl(self, name=None):
- dispatch.on_savepoint(self, name)
+ dispatch.savepoint(self, name)
return super(EventListenerConnection, self).\
_savepoint_impl(name=name)
def _rollback_to_savepoint_impl(self, name, context):
- dispatch.on_rollback_savepoint(self, name, context)
+ dispatch.rollback_savepoint(self, name, context)
return super(EventListenerConnection, self).\
_rollback_to_savepoint_impl(name, context)
def _release_savepoint_impl(self, name, context):
- dispatch.on_release_savepoint(self, name, context)
+ dispatch.release_savepoint(self, name, context)
return super(EventListenerConnection, self).\
_release_savepoint_impl(name, context)
def _begin_twophase_impl(self, xid):
- dispatch.on_begin_twophase(self, xid)
+ dispatch.begin_twophase(self, xid)
return super(EventListenerConnection, self).\
_begin_twophase_impl(xid)
def _prepare_twophase_impl(self, xid):
- dispatch.on_prepare_twophase(self, xid)
+ dispatch.prepare_twophase(self, xid)
return super(EventListenerConnection, self).\
_prepare_twophase_impl(xid)
def _rollback_twophase_impl(self, xid, is_prepared):
- dispatch.on_rollback_twophase(self, xid)
+ dispatch.rollback_twophase(self, xid)
return super(EventListenerConnection, self).\
_rollback_twophase_impl(xid, is_prepared)
def _commit_twophase_impl(self, xid, is_prepared):
- dispatch.on_commit_twophase(self, xid, is_prepared)
+ dispatch.commit_twophase(self, xid, is_prepared)
return super(EventListenerConnection, self).\
_commit_twophase_impl(xid, is_prepared)
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index 0e165b8f5..76af06529 100644
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -35,20 +35,20 @@ class SchemaGenerator(DDLBase):
tables = metadata.tables.values()
collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
- metadata.dispatch.on_before_create(metadata, self.connection,
+ metadata.dispatch.before_create(metadata, self.connection,
tables=collection)
for table in collection:
self.traverse_single(table, create_ok=True)
- metadata.dispatch.on_after_create(metadata, self.connection,
+ metadata.dispatch.after_create(metadata, self.connection,
tables=collection)
def visit_table(self, table, create_ok=False):
if not create_ok and not self._can_create(table):
return
- table.dispatch.on_before_create(table, self.connection)
+ table.dispatch.before_create(table, self.connection)
for column in table.columns:
if column.default is not None:
@@ -60,7 +60,7 @@ class SchemaGenerator(DDLBase):
for index in table.indexes:
self.traverse_single(index)
- table.dispatch.on_after_create(table, self.connection)
+ table.dispatch.after_create(table, self.connection)
def visit_sequence(self, sequence):
if self.dialect.supports_sequences:
@@ -89,13 +89,13 @@ class SchemaDropper(DDLBase):
tables = metadata.tables.values()
collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
- metadata.dispatch.on_before_drop(metadata, self.connection,
+ metadata.dispatch.before_drop(metadata, self.connection,
tables=collection)
for table in collection:
self.traverse_single(table, drop_ok=True)
- metadata.dispatch.on_after_drop(metadata, self.connection,
+ metadata.dispatch.after_drop(metadata, self.connection,
tables=collection)
def _can_drop(self, table):
@@ -111,7 +111,7 @@ class SchemaDropper(DDLBase):
if not drop_ok and not self._can_drop(table):
return
- table.dispatch.on_before_drop(table, self.connection)
+ table.dispatch.before_drop(table, self.connection)
for column in table.columns:
if column.default is not None:
@@ -119,7 +119,7 @@ class SchemaDropper(DDLBase):
self.connection.execute(schema.DropTable(table))
- table.dispatch.on_after_drop(table, self.connection)
+ table.dispatch.after_drop(table, self.connection)
def visit_sequence(self, sequence):
if self.dialect.supports_sequences:
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 5a81bd5f2..1191006c4 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -137,13 +137,13 @@ class DefaultEngineStrategy(EngineStrategy):
return
do_on_connect(conn)
- event.listen(pool, 'on_first_connect', on_connect)
- event.listen(pool, 'on_connect', on_connect)
+ event.listen(pool, 'first_connect', on_connect)
+ event.listen(pool, 'connect', on_connect)
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection)
dialect.initialize(c)
- event.listen(pool, 'on_first_connect', first_connect)
+ event.listen(pool, 'first_connect', first_connect)
return engine
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 5801c94a7..6785c7968 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -17,11 +17,11 @@ class DDLEvents(event.Events):
m = MetaData()
some_table = Table('some_table', m, Column('data', Integer))
- def on_after_create(target, connection, **kw):
+ def after_create(target, connection, **kw):
connection.execute("ALTER TABLE %s SET name=foo_%s" %
(target.name, target.name))
- event.listen(some_table, "on_after_create", on_after_create)
+ event.listen(some_table, "after_create", after_create)
DDL events integrate closely with the
:class:`.DDL` class and the :class:`.DDLElement` hierarchy
@@ -31,7 +31,7 @@ class DDLEvents(event.Events):
from sqlalchemy import DDL
event.listen(
some_table,
- "on_after_create",
+ "after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
)
@@ -51,7 +51,7 @@ class DDLEvents(event.Events):
"""
- def on_before_create(self, target, connection, **kw):
+ def before_create(self, target, connection, **kw):
"""Called before CREATE statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
@@ -66,7 +66,7 @@ class DDLEvents(event.Events):
"""
- def on_after_create(self, target, connection, **kw):
+ def after_create(self, target, connection, **kw):
"""Called after CREATE statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
@@ -81,7 +81,7 @@ class DDLEvents(event.Events):
"""
- def on_before_drop(self, target, connection, **kw):
+ def before_drop(self, target, connection, **kw):
"""Called before DROP statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
@@ -96,7 +96,7 @@ class DDLEvents(event.Events):
"""
- def on_after_drop(self, target, connection, **kw):
+ def after_drop(self, target, connection, **kw):
"""Called after DROP statements are emitted.
:param target: the :class:`.MetaData` or :class:`.Table`
@@ -126,7 +126,7 @@ class PoolEvents(event.Events):
def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
"handle an on checkout event"
- events.listen(Pool, 'on_checkout', my_on_checkout)
+ events.listen(Pool, 'checkout', my_on_checkout)
In addition to accepting the :class:`.Pool` class and :class:`.Pool` instances,
:class:`.PoolEvents` also accepts :class:`.Engine` objects and
@@ -137,7 +137,7 @@ class PoolEvents(event.Events):
engine = create_engine("postgresql://scott:tiger@localhost/test")
# will associate with engine.pool
- events.listen(engine, 'on_checkout', my_on_checkout)
+ events.listen(engine, 'checkout', my_on_checkout)
"""
@@ -156,7 +156,7 @@ class PoolEvents(event.Events):
else:
return target
- def on_connect(self, dbapi_connection, connection_record):
+ def connect(self, dbapi_connection, connection_record):
"""Called once for each new DB-API connection or Pool's ``creator()``.
:param dbapi_con:
@@ -168,7 +168,7 @@ class PoolEvents(event.Events):
"""
- def on_first_connect(self, dbapi_connection, connection_record):
+ def first_connect(self, dbapi_connection, connection_record):
"""Called exactly once for the first DB-API connection.
:param dbapi_con:
@@ -180,7 +180,7 @@ class PoolEvents(event.Events):
"""
- def on_checkout(self, dbapi_connection, connection_record, connection_proxy):
+ def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
:param dbapi_con:
@@ -199,7 +199,7 @@ class PoolEvents(event.Events):
using the new connection.
"""
- def on_checkin(self, dbapi_connection, connection_record):
+ def checkin(self, dbapi_connection, connection_record):
"""Called when a connection returns to the pool.
Note that the connection may be closed, and may be None if the
@@ -223,16 +223,16 @@ class EngineEvents(event.Events):
from sqlalchemy import event, create_engine
- def on_before_execute(conn, clauseelement, multiparams, params):
+ def before_execute(conn, clauseelement, multiparams, params):
log.info("Received statement: %s" % clauseelement)
engine = create_engine('postgresql://scott:tiger@localhost/test')
- event.listen(engine, "on_before_execute", on_before_execute)
+ event.listen(engine, "before_execute", before_execute)
Some events allow modifiers to the listen() function.
- :param retval=False: Applies to the :meth:`.on_before_execute` and
- :meth:`.on_before_cursor_execute` events only. When True, the
+ :param retval=False: Applies to the :meth:`.before_execute` and
+ :meth:`.before_cursor_execute` events only. When True, the
user-defined event function must have a return value, which
is a tuple of parameters that replace the given statement
and parameters. See those methods for a description of
@@ -250,13 +250,13 @@ class EngineEvents(event.Events):
target.dispatch)
if not retval:
- if identifier == 'on_before_execute':
+ if identifier == 'before_execute':
orig_fn = fn
def wrap(conn, clauseelement, multiparams, params):
orig_fn(conn, clauseelement, multiparams, params)
return clauseelement, multiparams, params
fn = wrap
- elif identifier == 'on_before_cursor_execute':
+ elif identifier == 'before_cursor_execute':
orig_fn = fn
def wrap(conn, cursor, statement,
parameters, context, executemany):
@@ -265,55 +265,55 @@ class EngineEvents(event.Events):
return statement, parameters
fn = wrap
- elif retval and identifier not in ('on_before_execute', 'on_before_cursor_execute'):
+ elif retval and identifier not in ('before_execute', 'before_cursor_execute'):
raise exc.ArgumentError(
- "Only the 'on_before_execute' and "
- "'on_before_cursor_execute' engine "
+ "Only the 'before_execute' and "
+ "'before_cursor_execute' engine "
"event listeners accept the 'retval=True' "
"argument.")
event.Events._listen(target, identifier, fn)
- def on_before_execute(self, conn, clauseelement, multiparams, params):
+ def before_execute(self, conn, clauseelement, multiparams, params):
"""Intercept high level execute() events."""
- def on_after_execute(self, conn, clauseelement, multiparams, params, result):
+ def after_execute(self, conn, clauseelement, multiparams, params, result):
"""Intercept high level execute() events."""
- def on_before_cursor_execute(self, conn, cursor, statement,
+ def before_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events."""
- def on_after_cursor_execute(self, conn, cursor, statement,
+ def after_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events."""
- def on_begin(self, conn):
+ def begin(self, conn):
"""Intercept begin() events."""
- def on_rollback(self, conn):
+ def rollback(self, conn):
"""Intercept rollback() events."""
- def on_commit(self, conn):
+ def commit(self, conn):
"""Intercept commit() events."""
- def on_savepoint(self, conn, name=None):
+ def savepoint(self, conn, name=None):
"""Intercept savepoint() events."""
- def on_rollback_savepoint(self, conn, name, context):
+ def rollback_savepoint(self, conn, name, context):
"""Intercept rollback_savepoint() events."""
- def on_release_savepoint(self, conn, name, context):
+ def release_savepoint(self, conn, name, context):
"""Intercept release_savepoint() events."""
- def on_begin_twophase(self, conn, xid):
+ def begin_twophase(self, conn, xid):
"""Intercept begin_twophase() events."""
- def on_prepare_twophase(self, conn, xid):
+ def prepare_twophase(self, conn, xid):
"""Intercept prepare_twophase() events."""
- def on_rollback_twophase(self, conn, xid, is_prepared):
+ def rollback_twophase(self, conn, xid, is_prepared):
"""Intercept rollback_twophase() events."""
- def on_commit_twophase(self, conn, xid, is_prepared):
+ def commit_twophase(self, conn, xid, is_prepared):
"""Intercept commit_twophase() events."""
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index 7dcbfd996..f3bd91efb 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -25,7 +25,7 @@ class Mutable(object):
return weakref.WeakKeyDictionary()
- def on_change(self):
+ def change(self):
"""Subclasses should call this method whenever change events occur."""
for parent, key in self._parents.items():
@@ -51,7 +51,7 @@ class Mutable(object):
key = attribute.key
parent_cls = attribute.class_
- def on_load(state):
+ def load(state):
"""Listen for objects loaded or refreshed.
Wrap the target data member's value with
@@ -64,7 +64,7 @@ class Mutable(object):
state.dict[key] = val
val._parents[state.obj()] = key
- def on_set(target, value, oldvalue, initiator):
+ def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
@@ -81,9 +81,9 @@ class Mutable(object):
oldvalue._parents.pop(state.obj(), None)
return value
- event.listen(parent_cls, 'on_load', on_load, raw=True)
- event.listen(parent_cls, 'on_refresh', on_load, raw=True)
- event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+ event.listen(parent_cls, 'load', load, raw=True)
+ event.listen(parent_cls, 'refresh', load, raw=True)
+ event.listen(attribute, 'set', set, raw=True, retval=True)
# TODO: need a deserialize hook here
@@ -109,7 +109,7 @@ class Mutable(object):
cls.associate_with_attribute(getattr(class_, prop.key))
break
- event.listen(mapper, 'on_mapper_configured', listen_for_type)
+ event.listen(mapper, 'mapper_configured', listen_for_type)
@classmethod
def as_mutable(cls, sqltype):
@@ -151,7 +151,7 @@ class Mutable(object):
cls.associate_with_attribute(getattr(class_, prop.key))
break
- event.listen(mapper, 'on_mapper_configured', listen_for_type)
+ event.listen(mapper, 'mapper_configured', listen_for_type)
return sqltype
@@ -168,7 +168,7 @@ class MutableComposite(object):
Composite classes, in addition to meeting the usage contract
defined in :ref:`mapper_composite`, also define some system
- of relaying change events to the given :meth:`.on_change`
+ of relaying change events to the given :meth:`.change`
method, which will notify all parents of the change. Below
the special Python method ``__setattr__`` is used to intercept
all changes::
@@ -180,7 +180,7 @@ class MutableComposite(object):
def __setattr__(self, key, value):
object.__setattr__(self, key, value)
- self.on_change()
+ self.change()
def __composite_values__(self):
return self.x, self.y
@@ -210,7 +210,7 @@ class MutableComposite(object):
return weakref.WeakKeyDictionary()
- def on_change(self):
+ def change(self):
"""Subclasses should call this method whenever change events occur."""
for parent, key in self._parents.items():
@@ -230,7 +230,7 @@ class MutableComposite(object):
key = attribute.key
parent_cls = attribute.class_
- def on_load(state):
+ def load(state):
"""Listen for objects loaded or refreshed.
Wrap the target data member's value with
@@ -242,7 +242,7 @@ class MutableComposite(object):
if val is not None:
val._parents[state.obj()] = key
- def on_set(target, value, oldvalue, initiator):
+ def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
@@ -257,9 +257,9 @@ class MutableComposite(object):
oldvalue._parents.pop(state.obj(), None)
return value
- event.listen(parent_cls, 'on_load', on_load, raw=True)
- event.listen(parent_cls, 'on_refresh', on_load, raw=True)
- event.listen(attribute, 'on_set', on_set, raw=True, retval=True)
+ event.listen(parent_cls, 'load', load, raw=True)
+ event.listen(parent_cls, 'refresh', load, raw=True)
+ event.listen(attribute, 'set', set, raw=True, retval=True)
# TODO: need a deserialize hook here
@@ -277,5 +277,5 @@ class MutableComposite(object):
if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
cls._listen_on_attribute(getattr(class_, prop.key))
- event.listen(mapper, 'on_mapper_configured', listen_for_type)
+ event.listen(mapper, 'mapper_configured', listen_for_type)
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 26910a5e6..f30602a1e 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -78,13 +78,13 @@ class PoolListener(object):
listener = util.as_interface(listener, methods=('connect',
'first_connect', 'checkout', 'checkin'))
if hasattr(listener, 'connect'):
- event.listen(self, 'on_connect', listener.connect)
+ event.listen(self, 'connect', listener.connect)
if hasattr(listener, 'first_connect'):
- event.listen(self, 'on_first_connect', listener.first_connect)
+ event.listen(self, 'first_connect', listener.first_connect)
if hasattr(listener, 'checkout'):
- event.listen(self, 'on_checkout', listener.checkout)
+ event.listen(self, 'checkout', listener.checkout)
if hasattr(listener, 'checkin'):
- event.listen(self, 'on_checkin', listener.checkin)
+ event.listen(self, 'checkin', listener.checkin)
def connect(self, dbapi_con, con_record):
@@ -187,7 +187,7 @@ class ConnectionProxy(object):
clauseelement, *multiparams,
**params)
- event.listen(self, 'on_before_execute', adapt_execute)
+ event.listen(self, 'before_execute', adapt_execute)
def adapt_cursor_execute(conn, cursor, statement,
parameters,context, executemany, ):
@@ -209,7 +209,7 @@ class ConnectionProxy(object):
executemany,
)
- event.listen(self, 'on_before_cursor_execute', adapt_cursor_execute)
+ event.listen(self, 'before_cursor_execute', adapt_cursor_execute)
def do_nothing_callback(*arg, **kw):
pass
@@ -221,23 +221,23 @@ class ConnectionProxy(object):
return util.update_wrapper(go, fn)
- event.listen(self, 'on_begin', adapt_listener(listener.begin))
- event.listen(self, 'on_rollback',
+ event.listen(self, 'begin', adapt_listener(listener.begin))
+ event.listen(self, 'rollback',
adapt_listener(listener.rollback))
- event.listen(self, 'on_commit', adapt_listener(listener.commit))
- event.listen(self, 'on_savepoint',
+ event.listen(self, 'commit', adapt_listener(listener.commit))
+ event.listen(self, 'savepoint',
adapt_listener(listener.savepoint))
- event.listen(self, 'on_rollback_savepoint',
+ event.listen(self, 'rollback_savepoint',
adapt_listener(listener.rollback_savepoint))
- event.listen(self, 'on_release_savepoint',
+ event.listen(self, 'release_savepoint',
adapt_listener(listener.release_savepoint))
- event.listen(self, 'on_begin_twophase',
+ event.listen(self, 'begin_twophase',
adapt_listener(listener.begin_twophase))
- event.listen(self, 'on_prepare_twophase',
+ event.listen(self, 'prepare_twophase',
adapt_listener(listener.prepare_twophase))
- event.listen(self, 'on_rollback_twophase',
+ event.listen(self, 'rollback_twophase',
adapt_listener(listener.rollback_twophase))
- event.listen(self, 'on_commit_twophase',
+ event.listen(self, 'commit_twophase',
adapt_listener(listener.commit_twophase))
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index d32d4f1b1..816a12168 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -447,7 +447,7 @@ class ScalarAttributeImpl(AttributeImpl):
else:
old = dict_.get(self.key, NO_VALUE)
- if self.dispatch.on_remove:
+ if self.dispatch.remove:
self.fire_remove_event(state, dict_, old, None)
state.modified_event(dict_, self, old)
del dict_[self.key]
@@ -465,19 +465,19 @@ class ScalarAttributeImpl(AttributeImpl):
else:
old = dict_.get(self.key, NO_VALUE)
- if self.dispatch.on_set:
+ if self.dispatch.set:
value = self.fire_replace_event(state, dict_,
value, old, initiator)
state.modified_event(dict_, self, old)
dict_[self.key] = value
def fire_replace_event(self, state, dict_, value, previous, initiator):
- for fn in self.dispatch.on_set:
+ for fn in self.dispatch.set:
value = fn(state, value, previous, initiator or self)
return value
def fire_remove_event(self, state, dict_, value, initiator):
- for fn in self.dispatch.on_remove:
+ for fn in self.dispatch.remove:
fn(state, value, initiator or self)
@property
@@ -618,7 +618,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), False)
- for fn in self.dispatch.on_remove:
+ for fn in self.dispatch.remove:
fn(state, value, initiator or self)
state.modified_event(dict_, self, value)
@@ -630,7 +630,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
previous is not PASSIVE_NO_RESULT):
self.sethasparent(instance_state(previous), False)
- for fn in self.dispatch.on_set:
+ for fn in self.dispatch.set:
value = fn(state, value, previous, initiator or self)
state.modified_event(dict_, self, previous)
@@ -709,7 +709,7 @@ class CollectionAttributeImpl(AttributeImpl):
def fire_append_event(self, state, dict_, value, initiator):
- for fn in self.dispatch.on_append:
+ for fn in self.dispatch.append:
value = fn(state, value, initiator or self)
state.modified_event(dict_, self, NEVER_SET, True)
@@ -726,7 +726,7 @@ class CollectionAttributeImpl(AttributeImpl):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), False)
- for fn in self.dispatch.on_remove:
+ for fn in self.dispatch.remove:
fn(state, value, initiator or self)
state.modified_event(dict_, self, NEVER_SET, True)
@@ -927,11 +927,11 @@ def backref_listeners(attribute, key, uselist):
passive=PASSIVE_NO_FETCH)
if uselist:
- event.listen(attribute, "on_append", append, retval=True, raw=True)
+ event.listen(attribute, "append", append, retval=True, raw=True)
else:
- event.listen(attribute, "on_set", set_, retval=True, raw=True)
+ event.listen(attribute, "set", set_, retval=True, raw=True)
# TODO: need coverage in test/orm/ of remove event
- event.listen(attribute, "on_remove", remove, retval=True, raw=True)
+ event.listen(attribute, "remove", remove, retval=True, raw=True)
class History(tuple):
"""A 3-tuple of added, unchanged and deleted values,
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 99e6464f2..f891e4901 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -180,7 +180,7 @@ class collection(object):
The decorators fall into two groups: annotations and interception recipes.
The annotating decorators (appender, remover, iterator,
- internally_instrumented, on_link) indicate the method's purpose and take no
+ internally_instrumented, link) indicate the method's purpose and take no
arguments. They are not written with parens::
@collection.appender
@@ -309,7 +309,7 @@ class collection(object):
return fn
@staticmethod
- def on_link(fn):
+ def link(fn):
"""Tag the method as the "linked to attribute" event handler.
This optional event handler will be called when the collection class
@@ -319,7 +319,7 @@ class collection(object):
that has been linked, or None if unlinking.
"""
- setattr(fn, '_sa_instrument_role', 'on_link')
+ setattr(fn, '_sa_instrument_role', 'link')
return fn
@staticmethod
@@ -795,7 +795,7 @@ def _instrument_class(cls):
if hasattr(method, '_sa_instrument_role'):
role = method._sa_instrument_role
assert role in ('appender', 'remover', 'iterator',
- 'on_link', 'converter')
+ 'link', 'converter')
roles[role] = name
# transfer instrumentation requests from decorated function
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index 8ec5c8042..b294a8d7d 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -86,7 +86,7 @@ class MapperExtension(object):
def reconstruct(instance):
ls_meth(self, instance)
return reconstruct
- event.listen(self.class_manager, 'on_load',
+ event.listen(self.class_manager, 'load',
go(ls_meth), raw=False, propagate=True)
elif meth == 'init_instance':
def go(ls_meth):
@@ -95,7 +95,7 @@ class MapperExtension(object):
self.class_manager.original_init,
instance, args, kwargs)
return init_instance
- event.listen(self.class_manager, 'on_init',
+ event.listen(self.class_manager, 'init',
go(ls_meth), raw=False, propagate=True)
elif meth == 'init_failed':
def go(ls_meth):
@@ -105,10 +105,10 @@ class MapperExtension(object):
instance, args, kwargs)
return init_failed
- event.listen(self.class_manager, 'on_init_failure',
+ event.listen(self.class_manager, 'init_failure',
go(ls_meth), raw=False, propagate=True)
else:
- event.listen(self, "on_%s" % meth, ls_meth,
+ event.listen(self, "%s" % meth, ls_meth,
raw=False, retval=True, propagate=True)
@@ -395,16 +395,16 @@ class SessionExtension(object):
@classmethod
def _adapt_listener(cls, self, listener):
- event.listen(self, 'on_before_commit', listener.before_commit)
- event.listen(self, 'on_after_commit', listener.after_commit)
- event.listen(self, 'on_after_rollback', listener.after_rollback)
- event.listen(self, 'on_before_flush', listener.before_flush)
- event.listen(self, 'on_after_flush', listener.after_flush)
- event.listen(self, 'on_after_flush_postexec', listener.after_flush_postexec)
- event.listen(self, 'on_after_begin', listener.after_begin)
- event.listen(self, 'on_after_attach', listener.after_attach)
- event.listen(self, 'on_after_bulk_update', listener.after_bulk_update)
- event.listen(self, 'on_after_bulk_delete', listener.after_bulk_delete)
+ event.listen(self, 'before_commit', listener.before_commit)
+ event.listen(self, 'after_commit', listener.after_commit)
+ event.listen(self, 'after_rollback', listener.after_rollback)
+ event.listen(self, 'before_flush', listener.before_flush)
+ event.listen(self, 'after_flush', listener.after_flush)
+ event.listen(self, 'after_flush_postexec', listener.after_flush_postexec)
+ event.listen(self, 'after_begin', listener.after_begin)
+ event.listen(self, 'after_attach', listener.after_attach)
+ event.listen(self, 'after_bulk_update', listener.after_bulk_update)
+ event.listen(self, 'after_bulk_delete', listener.after_bulk_delete)
def before_commit(self, session):
"""Execute right before commit is called.
@@ -534,13 +534,13 @@ class AttributeExtension(object):
@classmethod
def _adapt_listener(cls, self, listener):
- event.listen(self, 'on_append', listener.append,
+ event.listen(self, 'append', listener.append,
active_history=listener.active_history,
raw=True, retval=True)
- event.listen(self, 'on_remove', listener.remove,
+ event.listen(self, 'remove', listener.remove,
active_history=listener.active_history,
raw=True, retval=True)
- event.listen(self, 'on_set', listener.set,
+ event.listen(self, 'set', listener.set,
active_history=listener.active_history,
raw=True, retval=True)
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index d0f871664..2c3a7559d 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -116,7 +116,7 @@ class CompositeProperty(DescriptorProperty):
state = attributes.instance_state(instance)
attr = state.manager[self.key]
previous = dict_.get(self.key, attributes.NO_VALUE)
- for fn in attr.dispatch.on_set:
+ for fn in attr.dispatch.set:
value = fn(state, value, previous, attr.impl)
dict_[self.key] = value
if value is None:
@@ -133,7 +133,7 @@ class CompositeProperty(DescriptorProperty):
dict_ = attributes.instance_dict(instance)
previous = dict_.pop(self.key, attributes.NO_VALUE)
attr = state.manager[self.key]
- attr.dispatch.on_remove(state, previous, attr.impl)
+ attr.dispatch.remove(state, previous, attr.impl)
for key in self._attribute_keys:
setattr(instance, key, None)
@@ -183,13 +183,13 @@ class CompositeProperty(DescriptorProperty):
self._attribute_keys]
)
- event.listen(self.parent, 'on_after_insert',
+ event.listen(self.parent, 'after_insert',
insert_update_handler, raw=True)
- event.listen(self.parent, 'on_after_update',
+ event.listen(self.parent, 'after_update',
insert_update_handler, raw=True)
- event.listen(self.parent, 'on_load', load_handler, raw=True)
- event.listen(self.parent, 'on_refresh', load_handler, raw=True)
- event.listen(self.parent, "on_expire", expire_handler, raw=True)
+ event.listen(self.parent, 'load', load_handler, raw=True)
+ event.listen(self.parent, 'refresh', load_handler, raw=True)
+ event.listen(self.parent, "expire", expire_handler, raw=True)
# TODO: need a deserialize hook here
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 92bd78a58..9c0211cc5 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -76,7 +76,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection_history = self._modified_event(state, dict_)
collection_history.added_items.append(value)
- for fn in self.dispatch.on_append:
+ for fn in self.dispatch.append:
value = fn(state, value, initiator or self)
if self.trackparent and value is not None:
@@ -89,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
if self.trackparent and value is not None:
self.sethasparent(attributes.instance_state(value), False)
- for fn in self.dispatch.on_remove:
+ for fn in self.dispatch.remove:
fn(state, value, initiator or self)
def _modified_event(self, state, dict_):
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index e8dd07142..bb011e5f7 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -33,7 +33,7 @@ class InstrumentationEvents(event.Events):
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of instrumentation events not yet implemented")
- def on_class_instrument(self, cls):
+ def class_instrument(self, cls):
"""Called after the given class is instrumented.
To get at the :class:`.ClassManager`, use
@@ -41,7 +41,7 @@ class InstrumentationEvents(event.Events):
"""
- def on_class_uninstrument(self, cls):
+ def class_uninstrument(self, cls):
"""Called before the given class is uninstrumented.
To get at the :class:`.ClassManager`, use
@@ -50,7 +50,7 @@ class InstrumentationEvents(event.Events):
"""
- def on_attribute_instrument(self, cls, key, inst):
+ def attribute_instrument(self, cls, key, inst):
"""Called when an attribute is instrumented."""
class InstanceEvents(event.Events):
@@ -97,12 +97,12 @@ class InstanceEvents(event.Events):
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of instance events not yet implemented")
- def on_first_init(self, manager, cls):
+ def first_init(self, manager, cls):
"""Called when the first instance of a particular mapping is called.
"""
- def on_init(self, target, args, kwargs):
+ def init(self, target, args, kwargs):
"""Receive an instance when it's constructor is called.
This method is only called during a userland construction of
@@ -111,7 +111,7 @@ class InstanceEvents(event.Events):
"""
- def on_init_failure(self, target, args, kwargs):
+ def init_failure(self, target, args, kwargs):
"""Receive an instance when it's constructor has been called,
and raised an exception.
@@ -121,7 +121,7 @@ class InstanceEvents(event.Events):
"""
- def on_load(self, target):
+ def load(self, target):
"""Receive an object instance after it has been created via
``__new__``, and after initial attribute population has
occurred.
@@ -137,7 +137,7 @@ class InstanceEvents(event.Events):
"""
- def on_refresh(self, target):
+ def refresh(self, target):
"""Receive an object instance after one or more attributes have
been refreshed.
@@ -145,7 +145,7 @@ class InstanceEvents(event.Events):
"""
- def on_expire(self, target, keys):
+ def expire(self, target, keys):
"""Receive an object instance after its attributes or some subset
have been expired.
@@ -154,7 +154,7 @@ class InstanceEvents(event.Events):
"""
- def on_resurrect(self, target):
+ def resurrect(self, target):
"""Receive an object instance as it is 'resurrected' from
garbage collection, which occurs when a "dirty" state falls
out of scope."""
@@ -175,8 +175,8 @@ class MapperEvents(event.Events):
% target.special_number)
# associate the listener function with SomeMappedClass,
- # to execute during the "on_before_insert" hook
- event.listen(SomeMappedClass, 'on_before_insert', my_before_insert_listener)
+ # to execute during the "before_insert" hook
+ event.listen(SomeMappedClass, 'before_insert', my_before_insert_listener)
Available targets include mapped classes, instances of
:class:`.Mapper` (i.e. returned by :func:`.mapper`,
@@ -190,18 +190,18 @@ class MapperEvents(event.Events):
log.debug("Instance %s being inserted" % target)
# attach to all mappers
- event.listen(mapper, 'on_before_insert', some_listener)
+ event.listen(mapper, 'before_insert', some_listener)
Mapper events provide hooks into critical sections of the
mapper, including those related to object instrumentation,
object loading, and object persistence. In particular, the
- persistence methods :meth:`~.MapperEvents.on_before_insert`,
- and :meth:`~.MapperEvents.on_before_update` are popular
+ persistence methods :meth:`~.MapperEvents.before_insert`,
+ and :meth:`~.MapperEvents.before_update` are popular
places to augment the state being persisted - however, these
methods operate with several significant restrictions. The
user is encouraged to evaluate the
- :meth:`.SessionEvents.on_before_flush` and
- :meth:`.SessionEvents.on_after_flush` methods as more
+ :meth:`.SessionEvents.before_flush` and
+ :meth:`.SessionEvents.after_flush` methods as more
flexible and user-friendly hooks in which to apply
additional database state during a flush.
@@ -226,8 +226,8 @@ class MapperEvents(event.Events):
* ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
event handlers in the chain.
* other values - the return value specified by specific listeners,
- such as :meth:`~.MapperEvents.on_translate_row` or
- :meth:`~.MapperEvents.on_create_instance`.
+ such as :meth:`~.MapperEvents.translate_row` or
+ :meth:`~.MapperEvents.create_instance`.
"""
@@ -275,7 +275,7 @@ class MapperEvents(event.Events):
else:
event.Events._listen(target, identifier, fn)
- def on_instrument_class(self, mapper, class_):
+ def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed,
before instrumentation is applied to the mapped class.
@@ -291,7 +291,7 @@ class MapperEvents(event.Events):
"""
- def on_mapper_configured(self, mapper, class_):
+ def mapper_configured(self, mapper, class_):
"""Called when the mapper for the class is fully configured.
This event is the latest phase of mapper construction.
@@ -304,7 +304,7 @@ class MapperEvents(event.Events):
"""
# TODO: need coverage for this event
- def on_translate_row(self, mapper, context, row):
+ def translate_row(self, mapper, context, row):
"""Perform pre-processing on the given result row and return a
new row instance.
@@ -332,7 +332,7 @@ class MapperEvents(event.Events):
"""
- def on_create_instance(self, mapper, context, row, class_):
+ def create_instance(self, mapper, context, row, class_):
"""Receive a row when a new object instance is about to be
created from that row.
@@ -356,7 +356,7 @@ class MapperEvents(event.Events):
"""
- def on_append_result(self, mapper, context, row, target,
+ def append_result(self, mapper, context, row, target,
result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
@@ -389,7 +389,7 @@ class MapperEvents(event.Events):
"""
- def on_populate_instance(self, mapper, context, row,
+ def populate_instance(self, mapper, context, row,
target, **flags):
"""Receive an instance before that instance has
its attributes populated.
@@ -402,7 +402,7 @@ class MapperEvents(event.Events):
Most usages of this hook are obsolete. For a
generic "object has been newly created from a row" hook, use
- :meth:`.InstanceEvents.on_load`.
+ :meth:`.InstanceEvents.load`.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -420,7 +420,7 @@ class MapperEvents(event.Events):
"""
- def on_before_insert(self, mapper, connection, target):
+ def before_insert(self, mapper, connection, target):
"""Receive an object instance before an INSERT statement
is emitted corresponding to that instance.
@@ -460,7 +460,7 @@ class MapperEvents(event.Events):
"""
- def on_after_insert(self, mapper, connection, target):
+ def after_insert(self, mapper, connection, target):
"""Receive an object instance after an INSERT statement
is emitted corresponding to that instance.
@@ -492,7 +492,7 @@ class MapperEvents(event.Events):
"""
- def on_before_update(self, mapper, connection, target):
+ def before_update(self, mapper, connection, target):
"""Receive an object instance before an UPDATE statement
is emitted corresponding to that instance.
@@ -509,7 +509,7 @@ class MapperEvents(event.Events):
collections are modified. If, at update time, no
column-based attributes have any net changes, no UPDATE
statement will be issued. This means that an instance
- being sent to :meth:`~.MapperEvents.on_before_update` is
+ being sent to :meth:`~.MapperEvents.before_update` is
*not* a guarantee that an UPDATE statement will be
issued, although you can affect the outcome here by
modifying attributes so that a net change in value does
@@ -550,7 +550,7 @@ class MapperEvents(event.Events):
:return: No return value is supported by this event.
"""
- def on_after_update(self, mapper, connection, target):
+ def after_update(self, mapper, connection, target):
"""Receive an object instance after an UPDATE statement
is emitted corresponding to that instance.
@@ -568,7 +568,7 @@ class MapperEvents(event.Events):
collections are modified. If, at update time, no
column-based attributes have any net changes, no UPDATE
statement will be issued. This means that an instance
- being sent to :meth:`~.MapperEvents.on_after_update` is
+ being sent to :meth:`~.MapperEvents.after_update` is
*not* a guarantee that an UPDATE statement has been
issued.
@@ -600,7 +600,7 @@ class MapperEvents(event.Events):
"""
- def on_before_delete(self, mapper, connection, target):
+ def before_delete(self, mapper, connection, target):
"""Receive an object instance before a DELETE statement
is emitted corresponding to that instance.
@@ -634,7 +634,7 @@ class MapperEvents(event.Events):
"""
- def on_after_delete(self, mapper, connection, target):
+ def after_delete(self, mapper, connection, target):
"""Receive an object instance after a DELETE statement
has been emitted corresponding to that instance.
@@ -677,7 +677,7 @@ class SessionEvents(event.Events):
Session = sessionmaker()
- event.listen(Session, "on_before_commit", my_before_commit)
+ event.listen(Session, "before_commit", my_before_commit)
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
@@ -714,31 +714,31 @@ class SessionEvents(event.Events):
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of session events not yet implemented")
- def on_before_commit(self, session):
+ def before_commit(self, session):
"""Execute before commit is called.
Note that this may not be per-flush if a longer running
transaction is ongoing."""
- def on_after_commit(self, session):
+ def after_commit(self, session):
"""Execute after a commit has occured.
Note that this may not be per-flush if a longer running
transaction is ongoing."""
- def on_after_rollback(self, session):
+ def after_rollback(self, session):
"""Execute after a rollback has occured.
Note that this may not be per-flush if a longer running
transaction is ongoing."""
- def on_before_flush( self, session, flush_context, instances):
+ def before_flush( self, session, flush_context, instances):
"""Execute before flush process has started.
`instances` is an optional list of objects which were passed to
the ``flush()`` method. """
- def on_after_flush(self, session, flush_context):
+ def after_flush(self, session, flush_context):
"""Execute after flush has completed, but before commit has been
called.
@@ -746,7 +746,7 @@ class SessionEvents(event.Events):
'dirty', and 'deleted' lists still show pre-flush state as well
as the history settings on instance attributes."""
- def on_after_flush_postexec(self, session, flush_context):
+ def after_flush_postexec(self, session, flush_context):
"""Execute after flush has completed, and after the post-exec
state occurs.
@@ -755,18 +755,18 @@ class SessionEvents(event.Events):
occured, depending on whether or not the flush started its own
transaction or participated in a larger transaction. """
- def on_after_begin( self, session, transaction, connection):
+ def after_begin( self, session, transaction, connection):
"""Execute after a transaction is begun on a connection
`transaction` is the SessionTransaction. This method is called
after an engine level transaction is begun on a connection. """
- def on_after_attach(self, session, instance):
+ def after_attach(self, session, instance):
"""Execute after an instance is attached to a session.
This is called after an add, delete or merge. """
- def on_after_bulk_update( self, session, query, query_context, result):
+ def after_bulk_update( self, session, query, query_context, result):
"""Execute after a bulk update operation to the session.
This is called after a session.query(...).update()
@@ -776,7 +776,7 @@ class SessionEvents(event.Events):
`result` is the result object returned from the bulk operation.
"""
- def on_after_bulk_delete( self, session, query, query_context, result):
+ def after_bulk_delete( self, session, query, query_context, result):
"""Execute after a bulk delete operation to the session.
This is called after a session.query(...).delete()
@@ -800,7 +800,7 @@ class AttributeEvents(event.Events):
def my_append_listener(target, value, initiator):
print "received append event for target: %s" % target
- event.listen(MyClass.collection, 'on_append', my_append_listener)
+ event.listen(MyClass.collection, 'append', my_append_listener)
Listeners have the option to return a possibly modified version
of the value, when the ``retval=True`` flag is passed
@@ -813,7 +813,7 @@ class AttributeEvents(event.Events):
# setup listener on UserContact.phone attribute, instructing
# it to use the return value
- listen(UserContact.phone, 'on_set', validate_phone, retval=True)
+ listen(UserContact.phone, 'set', validate_phone, retval=True)
A validation function like the above can also raise an exception
such as :class:`ValueError` to halt the operation.
@@ -821,7 +821,7 @@ class AttributeEvents(event.Events):
Several modifiers are available to the :func:`~.event.listen` function.
:param active_history=False: When True, indicates that the
- "on_set" event would like to receive the "old" value being
+ "set" event would like to receive the "old" value being
replaced unconditionally, even if this requires firing off
database loads. Note that ``active_history`` can also be
set directly via :func:`.column_property` and
@@ -889,7 +889,7 @@ class AttributeEvents(event.Events):
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of attribute events not yet implemented")
- def on_append(self, target, value, initiator):
+ def append(self, target, value, initiator):
"""Receive a collection append event.
:param target: the object instance receiving the event.
@@ -906,7 +906,7 @@ class AttributeEvents(event.Events):
"""
- def on_remove(self, target, value, initiator):
+ def remove(self, target, value, initiator):
"""Receive a collection remove event.
:param target: the object instance receiving the event.
@@ -918,7 +918,7 @@ class AttributeEvents(event.Events):
:return: No return value is defined for this event.
"""
- def on_set(self, target, value, oldvalue, initiator):
+ def set(self, target, value, oldvalue, initiator):
"""Receive a scalar set event.
:param target: the object instance receiving the event.
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 9876dde3f..8cf3b8580 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -168,7 +168,7 @@ class ClassManager(dict):
@util.memoized_property
def _state_constructor(self):
- self.dispatch.on_first_init(self, self.class_)
+ self.dispatch.first_init(self, self.class_)
if self.mutable_attributes:
return state.MutableAttrInstanceState
else:
@@ -211,7 +211,7 @@ class ClassManager(dict):
def post_configure_attribute(self, key):
instrumentation_registry.dispatch.\
- on_attribute_instrument(self.class_, key, self[key])
+ attribute_instrument(self.class_, key, self[key])
def uninstrument_attribute(self, key, propagated=False):
if key not in self:
@@ -527,7 +527,7 @@ class InstrumentationRegistry(object):
self._state_finders[class_] = manager.state_getter()
self._dict_finders[class_] = manager.dict_getter()
- self.dispatch.on_class_instrument(class_)
+ self.dispatch.class_instrument(class_)
return manager
@@ -595,7 +595,7 @@ class InstrumentationRegistry(object):
def unregister(self, class_):
if class_ in self._manager_finders:
manager = self.manager_of_class(class_)
- self.dispatch.on_class_uninstrument(class_)
+ self.dispatch.class_uninstrument(class_)
manager.unregister()
manager.dispose()
del self._manager_finders[class_]
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 563de116a..a0265f9a8 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -395,7 +395,7 @@ class Mapper(object):
_mapper_registry[self] = True
- self.dispatch.on_instrument_class(self, self.class_)
+ self.dispatch.instrument_class(self, self.class_)
if manager is None:
manager = instrumentation.register_class(self.class_,
@@ -411,15 +411,15 @@ class Mapper(object):
if manager.info.get(_INSTRUMENTOR, False):
return
- event.listen(manager, 'on_first_init', _event_on_first_init, raw=True)
- event.listen(manager, 'on_init', _event_on_init, raw=True)
- event.listen(manager, 'on_resurrect', _event_on_resurrect, raw=True)
+ event.listen(manager, 'first_init', _event_on_first_init, raw=True)
+ event.listen(manager, 'init', _event_on_init, raw=True)
+ event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)
for key, method in util.iterate_attributes(self.class_):
if isinstance(method, types.FunctionType):
if hasattr(method, '__sa_reconstructor__'):
self._reconstructor = method
- event.listen(manager, 'on_load', _event_on_load, raw=True)
+ event.listen(manager, 'load', _event_on_load, raw=True)
elif hasattr(method, '__sa_validators__'):
for name in method.__sa_validators__:
self._validators[name] = method
@@ -1597,9 +1597,9 @@ class Mapper(object):
# call before_XXX extensions
if not has_identity:
- mapper.dispatch.on_before_insert(mapper, conn, state)
+ mapper.dispatch.before_insert(mapper, conn, state)
else:
- mapper.dispatch.on_before_update(mapper, conn, state)
+ mapper.dispatch.before_update(mapper, conn, state)
# detect if we have a "pending" instance (i.e. has
# no instance_key attached to it), and another instance
@@ -1911,9 +1911,9 @@ class Mapper(object):
# call after_XXX extensions
if not has_identity:
- mapper.dispatch.on_after_insert(mapper, connection, state)
+ mapper.dispatch.after_insert(mapper, connection, state)
else:
- mapper.dispatch.on_after_update(mapper, connection, state)
+ mapper.dispatch.after_update(mapper, connection, state)
def _postfetch(self, uowtransaction, table,
state, dict_, prefetch_cols, postfetch_cols,
@@ -1988,7 +1988,7 @@ class Mapper(object):
else:
conn = connection
- mapper.dispatch.on_before_delete(mapper, conn, state)
+ mapper.dispatch.before_delete(mapper, conn, state)
tups.append((state,
state.dict,
@@ -2074,7 +2074,7 @@ class Mapper(object):
)
for state, state_dict, mapper, has_identity, connection in tups:
- mapper.dispatch.on_after_delete(mapper, connection, state)
+ mapper.dispatch.after_delete(mapper, connection, state)
def _instance_processor(self, context, path, reduced_path, adapter,
polymorphic_from=None,
@@ -2143,10 +2143,10 @@ class Mapper(object):
listeners = self.dispatch
- translate_row = listeners.on_translate_row or None
- create_instance = listeners.on_create_instance or None
- populate_instance = listeners.on_populate_instance or None
- append_result = listeners.on_append_result or None
+ translate_row = listeners.translate_row or None
+ create_instance = listeners.create_instance or None
+ populate_instance = listeners.populate_instance or None
+ append_result = listeners.append_result or None
populate_existing = context.populate_existing or self.always_refresh
if self.allow_partial_pks:
is_not_primary_key = _none_set.issuperset
@@ -2297,9 +2297,9 @@ class Mapper(object):
populate_state(state, dict_, row, isnew, attrs)
if loaded_instance:
- state.manager.dispatch.on_load(state)
+ state.manager.dispatch.load(state)
elif isnew:
- state.manager.dispatch.on_refresh(state)
+ state.manager.dispatch.refresh(state)
if result is not None:
if append_result:
@@ -2408,7 +2408,7 @@ def configure_mappers():
try:
mapper._post_configure_properties()
mapper._expire_memoizations()
- mapper.dispatch.on_mapper_configured(mapper, mapper.class_)
+ mapper.dispatch.mapper_configured(mapper, mapper.class_)
except:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 74678a8d2..22f5e67c6 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1803,7 +1803,7 @@ class Query(object):
filter = None
custom_rows = single_entity and \
- self._entities[0].mapper.dispatch.on_append_result
+ self._entities[0].mapper.dispatch.append_result
(process, labels) = \
zip(*[
@@ -2193,7 +2193,7 @@ class Query(object):
)
)
- session.dispatch.on_after_bulk_delete(session, self, context, result)
+ session.dispatch.after_bulk_delete(session, self, context, result)
return result.rowcount
@@ -2343,7 +2343,7 @@ class Query(object):
[_attr_as_key(k) for k in values]
)
- session.dispatch.on_after_bulk_update(session, self, context, result)
+ session.dispatch.after_bulk_update(session, self, context, result)
return result.rowcount
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index eba4ace8c..c4c2ee1e7 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -345,7 +345,7 @@ class SessionTransaction(object):
self._connections[conn] = self._connections[conn.engine] = \
(conn, transaction, conn is not bind)
- self.session.dispatch.on_after_begin(self.session, self, conn)
+ self.session.dispatch.after_begin(self.session, self, conn)
return conn
def prepare(self):
@@ -357,7 +357,7 @@ class SessionTransaction(object):
def _prepare_impl(self):
self._assert_is_active()
if self._parent is None or self.nested:
- self.session.dispatch.on_before_commit(self.session)
+ self.session.dispatch.before_commit(self.session)
stx = self.session.transaction
if stx is not self:
@@ -387,7 +387,7 @@ class SessionTransaction(object):
for t in set(self._connections.values()):
t[1].commit()
- self.session.dispatch.on_after_commit(self.session)
+ self.session.dispatch.after_commit(self.session)
if self.session._enable_transaction_accounting:
self._remove_snapshot()
@@ -424,7 +424,7 @@ class SessionTransaction(object):
if self.session._enable_transaction_accounting:
self._restore_snapshot()
- self.session.dispatch.on_after_rollback(self.session)
+ self.session.dispatch.after_rollback(self.session)
def _deactivate(self):
self._active = False
@@ -1255,7 +1255,7 @@ class Session(object):
merged_state.commit_all(merged_dict, self.identity_map)
if new_instance:
- merged_state.manager.dispatch.on_load(merged_state)
+ merged_state.manager.dispatch.load(merged_state)
return merged
@classmethod
@@ -1339,8 +1339,8 @@ class Session(object):
if state.session_id != self.hash_key:
state.session_id = self.hash_key
- if self.dispatch.on_after_attach:
- self.dispatch.on_after_attach(self, state.obj())
+ if self.dispatch.after_attach:
+ self.dispatch.after_attach(self, state.obj())
def __contains__(self, instance):
"""Return True if the instance is associated with this session.
@@ -1415,8 +1415,8 @@ class Session(object):
flush_context = UOWTransaction(self)
- if self.dispatch.on_before_flush:
- self.dispatch.on_before_flush(self, flush_context, objects)
+ if self.dispatch.before_flush:
+ self.dispatch.before_flush(self, flush_context, objects)
# re-establish "dirty states" in case the listeners
# added
dirty = self._dirty_states
@@ -1470,7 +1470,7 @@ class Session(object):
try:
flush_context.execute()
- self.dispatch.on_after_flush(self, flush_context)
+ self.dispatch.after_flush(self, flush_context)
transaction.commit()
except:
transaction.rollback(_capture_exception=True)
@@ -1486,7 +1486,7 @@ class Session(object):
# self.identity_map._modified.difference(objects)
#self.identity_map._modified.clear()
- self.dispatch.on_after_flush_postexec(self, flush_context)
+ self.dispatch.after_flush_postexec(self, flush_context)
def is_modified(self, instance, include_collections=True, passive=False):
"""Return ``True`` if instance has modified attributes.
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 89a84e898..f007665da 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -89,7 +89,7 @@ class InstanceState(object):
self, instance, args = mixed[0], mixed[1], mixed[2:]
manager = self.manager
- manager.dispatch.on_init(self, args, kwargs)
+ manager.dispatch.init(self, args, kwargs)
#if manager.mutable_attributes:
# assert self.__class__ is MutableAttrInstanceState
@@ -97,7 +97,7 @@ class InstanceState(object):
try:
return manager.original_init(*mixed[1:], **kwargs)
except:
- manager.dispatch.on_init_failure(self, args, kwargs)
+ manager.dispatch.init_failure(self, args, kwargs)
raise
def get_history(self, key, **kwargs):
@@ -232,7 +232,7 @@ class InstanceState(object):
self.callables[key] = self
dict_.pop(key, None)
- self.manager.dispatch.on_expire(self, None)
+ self.manager.dispatch.expire(self, None)
def expire_attributes(self, dict_, attribute_names):
pending = self.__dict__.get('pending', None)
@@ -250,7 +250,7 @@ class InstanceState(object):
if pending:
pending.pop(key, None)
- self.manager.dispatch.on_expire(self, attribute_names)
+ self.manager.dispatch.expire(self, attribute_names)
def __call__(self, passive):
"""__call__ allows the InstanceState to act as a deferred
@@ -516,7 +516,7 @@ class MutableAttrInstanceState(InstanceState):
obj.__dict__.update(self.mutable_dict)
# re-establishes identity attributes from the key
- self.manager.dispatch.on_resurrect(self)
+ self.manager.dispatch.resurrect(self)
return obj
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 92fd74f78..3dfa15df1 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -1277,6 +1277,6 @@ def single_parent_validator(desc, prop):
def set_(state, value, oldvalue, initiator):
return _do_check(state, value, oldvalue, initiator)
- event.listen(desc, 'on_append', append, raw=True, retval=True, active_history=True)
- event.listen(desc, 'on_set', set_, raw=True, retval=True, active_history=True)
+ event.listen(desc, 'append', append, raw=True, retval=True, active_history=True)
+ event.listen(desc, 'set', set_, raw=True, retval=True, active_history=True)
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 0dd5640a8..b2798cf83 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -74,9 +74,9 @@ def track_cascade_events(descriptor, prop):
sess.expunge(oldvalue)
return newvalue
- event.listen(descriptor, 'on_append', append, raw=True, retval=True)
- event.listen(descriptor, 'on_remove', remove, raw=True, retval=True)
- event.listen(descriptor, 'on_set', set_, raw=True, retval=True)
+ event.listen(descriptor, 'append', append, raw=True, retval=True)
+ event.listen(descriptor, 'remove', remove, raw=True, retval=True)
+ event.listen(descriptor, 'set', set_, raw=True, retval=True)
class UOWTransaction(object):
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 4a8b1713c..0cfdc6436 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -63,8 +63,8 @@ def _validator_events(desc, key, validator):
def set_(state, value, oldvalue, initiator):
return validator(state.obj(), key, value)
- event.listen(desc, 'on_append', append, raw=True, retval=True)
- event.listen(desc, 'on_set', set_, raw=True, retval=True)
+ event.listen(desc, 'append', append, raw=True, retval=True)
+ event.listen(desc, 'set', set_, raw=True, retval=True)
def polymorphic_union(table_map, typecolname, aliasname='p_union'):
"""Create a ``UNION`` statement used by a polymorphic mapper.
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 572087217..050b239c4 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -255,8 +255,8 @@ class _ConnectionRecord(object):
self.connection = self.__connect()
self.info = {}
- pool.dispatch.on_first_connect.exec_once(self.connection, self)
- pool.dispatch.on_connect(self.connection, self)
+ pool.dispatch.first_connect.exec_once(self.connection, self)
+ pool.dispatch.connect(self.connection, self)
def close(self):
if self.connection is not None:
@@ -284,8 +284,8 @@ class _ConnectionRecord(object):
if self.connection is None:
self.connection = self.__connect()
self.info.clear()
- if self.__pool.dispatch.on_connect:
- self.__pool.dispatch.on_connect(self.connection, self)
+ if self.__pool.dispatch.connect:
+ self.__pool.dispatch.connect(self.connection, self)
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
self.__pool.logger.info(
@@ -294,8 +294,8 @@ class _ConnectionRecord(object):
self.__close()
self.connection = self.__connect()
self.info.clear()
- if self.__pool.dispatch.on_connect:
- self.__pool.dispatch.on_connect(self.connection, self)
+ if self.__pool.dispatch.connect:
+ self.__pool.dispatch.connect(self.connection, self)
return self.connection
def __close(self):
@@ -348,8 +348,8 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo):
if echo:
pool.logger.debug("Connection %r being returned to pool",
connection)
- if pool.dispatch.on_checkin:
- pool.dispatch.on_checkin(connection, connection_record)
+ if pool.dispatch.checkin:
+ pool.dispatch.checkin(connection, connection_record)
pool._return_conn(connection_record)
_refs = set()
@@ -435,14 +435,14 @@ class _ConnectionFairy(object):
raise exc.InvalidRequestError("This connection is closed")
self.__counter += 1
- if not self._pool.dispatch.on_checkout or self.__counter != 1:
+ if not self._pool.dispatch.checkout or self.__counter != 1:
return self
# Pool listeners can trigger a reconnection on checkout
attempts = 2
while attempts > 0:
try:
- self._pool.dispatch.on_checkout(self.connection,
+ self._pool.dispatch.checkout(self.connection,
self._connection_record,
self)
return self
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 8fd758d2d..371181fd8 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -383,7 +383,7 @@ class Table(SchemaItem, expression.TableClause):
def adapt_listener(target, connection, **kw):
listener(event_name, target, connection, **kw)
- event.listen(self, "on_" + event_name.replace('-', '_'), adapt_listener)
+ event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
def _set_parent(self, metadata):
metadata._add_table(self.name, self.schema, self)
@@ -1753,8 +1753,8 @@ class ForeignKeyConstraint(Constraint):
return table in set(kw['tables']) and \
bind.dialect.supports_alter
- event.listen(table.metadata, "on_after_create", AddConstraint(self, on=supports_alter))
- event.listen(table.metadata, "on_before_drop", DropConstraint(self, on=supports_alter))
+ event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter))
+ event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter))
def copy(self, **kw):
@@ -2085,7 +2085,7 @@ class MetaData(SchemaItem):
def adapt_listener(target, connection, **kw):
listener(event, target, connection, **kw)
- event.listen(self, "on_" + event_name.replace('-', '_'), adapt_listener)
+ event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
def create_all(self, bind=None, tables=None, checkfirst=True):
"""Create all tables stored in this metadata.
@@ -2219,7 +2219,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
event.listen(
users,
- 'on_after_create',
+ 'after_create',
AddConstraint(constraint).execute_if(dialect='postgresql')
)
@@ -2309,7 +2309,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
target, connection, **kw):
return connection.execute(self.against(target))
- event.listen(target, "on_" + event_name.replace('-', '_'), call_event)
+ event.listen(target, "" + event_name.replace('-', '_'), call_event)
@expression._generative
def against(self, target):
@@ -2326,7 +2326,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
event.listen(
metadata,
- 'on_before_create',
+ 'before_create',
DDL("my_ddl").execute_if(dialect='postgresql')
)
@@ -2446,10 +2446,10 @@ class DDL(DDLElement):
from sqlalchemy import event, DDL
tbl = Table('users', metadata, Column('uid', Integer))
- event.listen(tbl, 'on_before_create', DDL('DROP TRIGGER users_trigger'))
+ event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
- event.listen(tbl, 'on_after_create', spow.execute_if(dialect='somedb'))
+ event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
connection.execute(drop_spow)