summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2010-11-18 18:50:24 -0500
committerMike Bayer <mike_mp@zzzcomputing.com>2010-11-18 18:50:24 -0500
commit6dbf2c3314a797a39624f1e68569bfbbb2b6ac87 (patch)
treeff03c9f0c42c14275882cbcc8b9fbe0bef13f535
parentf252af2b21c5bafeaa30aabcf65dfed9b5c01093 (diff)
parent2336b1cebfcb2f304e09cbc2a0e8bb3fb3a9ceeb (diff)
downloadsqlalchemy-6dbf2c3314a797a39624f1e68569bfbbb2b6ac87.tar.gz
merge tip
-rw-r--r--CHANGES43
-rw-r--r--doc/build/core/types.rst118
-rw-r--r--doc/build/orm/mapper_config.rst35
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py123
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py69
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py22
-rw-r--r--lib/sqlalchemy/dialects/postgresql/psycopg2.py15
-rwxr-xr-xlib/sqlalchemy/ext/declarative.py4
-rw-r--r--lib/sqlalchemy/orm/__init__.py67
-rw-r--r--lib/sqlalchemy/orm/interfaces.py4
-rw-r--r--lib/sqlalchemy/orm/mapper.py2
-rw-r--r--lib/sqlalchemy/orm/properties.py5
-rw-r--r--lib/sqlalchemy/orm/query.py5
-rw-r--r--lib/sqlalchemy/orm/strategies.py7
-rw-r--r--lib/sqlalchemy/processors.py6
-rw-r--r--lib/sqlalchemy/types.py6
-rw-r--r--test/dialect/test_oracle.py44
-rw-r--r--test/dialect/test_postgresql.py76
-rw-r--r--test/ext/test_declarative.py32
-rw-r--r--test/orm/test_generative.py11
-rw-r--r--test/orm/test_mapper.py11
-rw-r--r--test/orm/test_relationships.py389
22 files changed, 891 insertions, 203 deletions
diff --git a/CHANGES b/CHANGES
index 7f15effc1..86fac65a2 100644
--- a/CHANGES
+++ b/CHANGES
@@ -24,17 +24,45 @@ CHANGES
strategy would fail if the entity was an aliased()
construct. [ticket:1964]
+ - Fixed indexing of Query objects by -1. It was erroneously
+ transformed to the empty slice -1:0 that resulted in
+ IndexError. [ticket:1968]
+
+ - The mapper argument "primary_key" can be passed as a
+ single column as well as a list or tuple. [ticket:1971]
+ The documentation examples that illustrated it as a
+ scalar value have been changed to lists.
+
+ - Added active_history flag to relationship()
+ and column_property(), forces attribute events to
+ always load the "old" value, so that it's available to
+ attributes.get_history(). [ticket:1961]
+
- sql
- The 'info' attribute of Column is copied during
Column.copy(), i.e. as occurs when using columns
in declarative mixins. [ticket:1967]
+
+ - Added a bind processor for booleans which coerces
+ to int, for DBAPIs such as pymssql that naively call
+ str() on values.
- engine
- Implemented sequence check capability for the C
version of RowProxy, as well as 2.7 style
"collections.Sequence" registration for RowProxy.
[ticket:1871]
-
+
+- postgresql
+ - Ensured every numeric, float, int code, scalar + array,
+ are recognized by psycopg2 and pg8000's "numeric"
+ base type. [ticket:1955]
+
+ - Added as_uuid=True flag to the UUID type, will receive
+ and return values as Python UUID() objects rather than
+ strings. Currently, the UUID type is only known to
+ work with psycopg2. [ticket:1956]
+
- mysql
- Fixed error handling for Jython + zxjdbc, such that
has_table() property works again. Regression from
@@ -51,6 +79,19 @@ CHANGES
that includes a remote schema to a *different* schema
than that of the parent table doesn't render at all,
as cross-schema references do not appear to be supported.
+
+- oracle
+ - The cx_oracle "decimal detection" logic, which takes place
+      for result set columns with ambiguous numeric characteristics,
+ now uses the decimal point character determined by the locale/
+ NLS_LANG setting, using an on-first-connect detection of
+ this character. cx_oracle 5.0.3 or greater is also required
+ when using a non-period-decimal-point NLS_LANG setting.
+ [ticket:1953].
+
+- declarative
+ - An error is raised if __table_args__ is not in tuple
+ or dict format, and is not None. [ticket:1972]
0.6.5
=====
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst
index 89a84abc8..ddb40d045 100644
--- a/doc/build/core/types.rst
+++ b/doc/build/core/types.rst
@@ -264,6 +264,124 @@ to and from the database is required.
:inherited-members:
:show-inheritance:
+A few key :class:`.TypeDecorator` recipes follow.
+
+Rounding Numerics
+^^^^^^^^^^^^^^^^^
+
+Some database connectors like those of SQL Server choke if a Decimal is passed with too
+many decimal places. Here's a recipe that rounds them down::
+
+ from sqlalchemy.types import TypeDecorator, Numeric
+ from decimal import Decimal
+
+ class SafeNumeric(TypeDecorator):
+ """Adds quantization to Numeric."""
+
+ impl = Numeric
+
+ def __init__(self, *arg, **kw):
+ TypeDecorator.__init__(self, *arg, **kw)
+ self.quantize_int = -(self.impl.precision - self.impl.scale)
+ self.quantize = Decimal(10) ** self.quantize_int
+
+ def process_bind_param(self, value, dialect):
+ if isinstance(value, Decimal) and \
+ value.as_tuple()[2] < self.quantize_int:
+ value = value.quantize(self.quantize)
+ return value
+
+Backend-agnostic GUID Type
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Receives and returns Python uuid() objects. Uses the PG UUID type
+when using Postgresql, CHAR(32) on other backends, storing them
+in stringified hex format. Can be modified to store
+binary in CHAR(16) if desired::
+
+ from sqlalchemy.types import TypeDecorator, CHAR
+ from sqlalchemy.dialects.postgresql import UUID
+ import uuid
+
+ class GUID(TypeDecorator):
+ """Platform-independent GUID type.
+
+ Uses Postgresql's UUID type, otherwise uses
+ CHAR(32), storing as stringified hex values.
+
+ """
+ impl = CHAR
+
+ def load_dialect_impl(self, dialect):
+ if dialect.name == 'postgresql':
+ return dialect.type_descriptor(UUID())
+ else:
+ return dialect.type_descriptor(CHAR(32))
+
+ def process_bind_param(self, value, dialect):
+ if value is None:
+ return value
+ elif dialect.name == 'postgresql':
+ return str(value)
+ else:
+            if not isinstance(value, uuid.UUID):
+                return "%.32x" % uuid.UUID(value).int
+            else:
+                # hexstring
+                return "%.32x" % value.int
+
+ def process_result_value(self, value, dialect):
+ if value is None:
+ return value
+ else:
+ return uuid.UUID(value)
+
+Marshal JSON Strings
+^^^^^^^^^^^^^^^^^^^^^
+
+This type uses ``simplejson`` to marshal Python data structures
+to/from JSON. Can be modified to use Python's builtin json encoder.
+
+Note that the base type is not "mutable", meaning in-place changes to
+the value will not be detected by the ORM - you instead would need to
+replace the existing value with a new one to detect changes.
+The subtype ``MutableJSONEncodedDict``
+adds "mutability" to allow this, but note that "mutable" types add
+a significant performance penalty to the ORM's flush process::
+
+ from sqlalchemy.types import TypeDecorator, MutableType, VARCHAR
+ import simplejson
+
+ class JSONEncodedDict(TypeDecorator):
+ """Represents an immutable structure as a json-encoded string.
+
+ Usage::
+
+ JSONEncodedDict(255)
+
+ """
+
+ impl = VARCHAR
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = simplejson.dumps(value, use_decimal=True)
+
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value = simplejson.loads(value, use_decimal=True)
+ return value
+
+ class MutableJSONEncodedDict(MutableType, JSONEncodedDict):
+ """Adds mutability to JSONEncodedDict."""
+
+ def copy_value(self, value):
+ return simplejson.loads(
+ simplejson.dumps(value, use_decimal=True),
+ use_decimal=True)
+
Creating New Types
~~~~~~~~~~~~~~~~~~
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index 954d43646..40512f520 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -21,7 +21,7 @@ Would translate into declarative as::
class User(Base):
__table__ = users_table
__mapper_args__ = {
- 'primary_key':users_table.c.id
+ 'primary_key':[users_table.c.id]
}
Or if using ``__tablename__``, :class:`.Column` objects are declared inline
@@ -33,7 +33,7 @@ with the class definition. These are usable as is within ``__mapper_args__``::
id = Column(Integer)
__mapper_args__ = {
- 'primary_key':id
+ 'primary_key':[id]
}
@@ -77,7 +77,7 @@ collections (new feature as of 0.6.4)::
mapper(UserAddress, users_table.join(addresses_table),
exclude_properties=[addresses_table.c.id],
- primary_key=users_table.c.id
+ primary_key=[users_table.c.id]
)
It should be noted that insert and update defaults configured on individual
@@ -142,6 +142,33 @@ together using a list, as below where we map to a :func:`~.expression.join`::
For further examples on this particular use case, see :ref:`maptojoin`.
+column_property API
+~~~~~~~~~~~~~~~~~~~
+
+The establishment of a :class:`.Column` on a :func:`.mapper` can be further
+customized using the :func:`.column_property` function, as specified
+to the ``properties`` dictionary. This function is
+usually invoked implicitly for each mapped :class:`.Column`. Explicit usage
+looks like::
+
+ from sqlalchemy.orm import mapper, column_property
+
+ mapper(User, users, properties={
+ 'name':column_property(users.c.name, active_history=True)
+ })
+
+or with declarative::
+
+ class User(Base):
+ __tablename__ = 'users'
+
+ id = Column(Integer, primary_key=True)
+ name = column_property(Column(String(50)), active_history=True)
+
+Further examples of :func:`.column_property` are at :ref:`mapper_sql_expressions`.
+
+.. autofunction:: column_property
+
.. _deferred:
Deferred Column Loading
@@ -267,8 +294,6 @@ Correlated subqueries may be used as well::
The declarative form of the above is described in :ref:`declarative_sql_expressions`.
-.. autofunction:: column_property
-
Note that :func:`.column_property` is used to provide the effect of a SQL
expression that is actively rendered into the SELECT generated for a
particular mapped class. Alternatively, for the typical attribute that
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index eb25e614e..87a84e514 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -66,6 +66,52 @@ Two Phase Transaction Support
Two Phase transactions are implemented using XA transactions. Success has been reported
with this feature but it should be regarded as experimental.
+Precision Numerics
+------------------
+
+The SQLAlchemy dialect goes through a lot of steps to ensure
+that decimal numbers are sent and received with full accuracy.
+An "outputtypehandler" callable is associated with each
+cx_oracle connection object which detects numeric types and
+receives them as string values, instead of receiving a Python
+``float`` directly, which is then passed to the Python
+``Decimal`` constructor. The :class:`.Numeric` and
+:class:`.Float` types under the cx_oracle dialect are aware of
+this behavior, and will coerce the ``Decimal`` to ``float`` if
+the ``asdecimal`` flag is ``False`` (default on :class:`.Float`,
+optional on :class:`.Numeric`).
+
+The handler attempts to use the "precision" and "scale"
+attributes of the result set column to best determine if
+subsequent incoming values should be received as ``Decimal`` as
+opposed to int (in which case no processing is added). There are
+several scenarios where OCI_ does not provide unambiguous data
+as to the numeric type, including some situations where
+individual rows may return a combination of floating point and
+integer values. Certain values for "precision" and "scale" have
+been observed to determine this scenario. When it occurs, the
+outputtypehandler receives as string and then passes off to a
+processing function which detects, for each returned value, if a
+decimal point is present, and if so converts to ``Decimal``,
+otherwise to int. The intention is that simple int-based
+statements like "SELECT my_seq.nextval() FROM DUAL" continue to
+return ints and not ``Decimal`` objects, and that any kind of
+floating point value is received as a string so that there is no
+floating point loss of precision.
+
+The "decimal point is present" logic itself is also sensitive to
+locale. Under OCI_, this is controlled by the NLS_LANG
+environment variable. Upon first connection, the dialect runs a
+test to determine the current "decimal" character, which can be
+a comma "," for European locales. From that point forward the
+outputtypehandler uses that character to represent a decimal
+point (this behavior is new in version 0.6.6). Note that
+cx_oracle 5.0.3 or greater is required when dealing with
+numerics with locale settings that don't use a period "." as the
+decimal character.
+
+.. _OCI: http://www.oracle.com/technetwork/database/features/oci/index.html
+
"""
from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, \
@@ -76,6 +122,7 @@ from sqlalchemy import types as sqltypes, util, exc, processors
from datetime import datetime
import random
from decimal import Decimal
+import re
class _OracleNumeric(sqltypes.Numeric):
def bind_processor(self, dialect):
@@ -473,37 +520,80 @@ class OracleDialect_cx_oracle(OracleDialect):
self.dbapi.BLOB: oracle.BLOB(),
self.dbapi.BINARY: oracle.RAW(),
}
+ @classmethod
+ def dbapi(cls):
+ import cx_Oracle
+ return cx_Oracle
def initialize(self, connection):
super(OracleDialect_cx_oracle, self).initialize(connection)
if self._is_oracle_8:
self.supports_unicode_binds = False
+ self._detect_decimal_char(connection)
+
+ def _detect_decimal_char(self, connection):
+ """detect if the decimal separator character is not '.', as
+    is the case with European locale settings for NLS_LANG.
+
+ cx_oracle itself uses similar logic when it formats Python
+ Decimal objects to strings on the bind side (as of 5.0.3),
+ as Oracle sends/receives string numerics only in the
+ current locale.
+
+ """
+ if self.cx_oracle_ver < (5,):
+ # no output type handlers before version 5
+ return
+
+ cx_Oracle = self.dbapi
+ conn = connection.connection
+
+ # override the output_type_handler that's
+ # on the cx_oracle connection with a plain
+ # one on the cursor
+
+ def output_type_handler(cursor, name, defaultType,
+ size, precision, scale):
+ return cursor.var(
+ cx_Oracle.STRING,
+ 255, arraysize=cursor.arraysize)
+
+ cursor = conn.cursor()
+ cursor.outputtypehandler = output_type_handler
+ cursor.execute("SELECT 0.1 FROM DUAL")
+ val = cursor.fetchone()[0]
+ cursor.close()
+ char = re.match(r"([\.,])", val).group(1)
+ if char != '.':
+ _detect_decimal = self._detect_decimal
+ self._detect_decimal = \
+ lambda value: _detect_decimal(value.replace(char, '.'))
+ self._to_decimal = \
+ lambda value: Decimal(value.replace(char, '.'))
+
+ def _detect_decimal(self, value):
+ if "." in value:
+ return Decimal(value)
+ else:
+ return int(value)
+
+ _to_decimal = Decimal
- @classmethod
- def dbapi(cls):
- import cx_Oracle
- return cx_Oracle
-
def on_connect(self):
if self.cx_oracle_ver < (5,):
# no output type handlers before version 5
return
- def maybe_decimal(value):
- if "." in value:
- return Decimal(value)
- else:
- return int(value)
-
cx_Oracle = self.dbapi
- def output_type_handler(cursor, name, defaultType, size, precision, scale):
+ def output_type_handler(cursor, name, defaultType,
+ size, precision, scale):
# convert all NUMBER with precision + positive scale to Decimal
# this almost allows "native decimal" mode.
if defaultType == cx_Oracle.NUMBER and precision and scale > 0:
return cursor.var(
cx_Oracle.STRING,
255,
- outconverter=Decimal,
+ outconverter=self._to_decimal,
arraysize=cursor.arraysize)
# if NUMBER with zero precision and 0 or neg scale, this appears
# to indicate "ambiguous". Use a slower converter that will
@@ -515,7 +605,7 @@ class OracleDialect_cx_oracle(OracleDialect):
return cursor.var(
cx_Oracle.STRING,
255,
- outconverter=maybe_decimal,
+ outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
# allow all strings to come back natively as Unicode
elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
@@ -578,7 +668,10 @@ class OracleDialect_cx_oracle(OracleDialect):
return ([], opts)
def _get_server_version_info(self, connection):
- return tuple(int(x) for x in connection.connection.version.split('.'))
+ return tuple(
+ int(x)
+ for x in connection.connection.version.split('.')
+ )
def is_disconnect(self, e):
if isinstance(e, self.dbapi.InterfaceError):
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 0d103cb0d..7b1a97c32 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -94,13 +94,18 @@ from sqlalchemy.sql import compiler, expression, util as sql_util
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes
+try:
+ from uuid import UUID as _python_UUID
+except ImportError:
+ _python_UUID = None
+
from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
CHAR, TEXT, FLOAT, NUMERIC, \
DATE, BOOLEAN
-_DECIMAL_TYPES = (1700, 1231)
+_DECIMAL_TYPES = (1231, 1700)
_FLOAT_TYPES = (700, 701, 1021, 1022)
-
+_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016)
class REAL(sqltypes.Float):
__visit_name__ = "REAL"
@@ -134,6 +139,12 @@ class TIME(sqltypes.TIME):
self.precision = precision
class INTERVAL(sqltypes.TypeEngine):
+ """Postgresql INTERVAL type.
+
+ The INTERVAL type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000 or zxjdbc.
+
+ """
__visit_name__ = 'INTERVAL'
def __init__(self, precision=None):
self.precision = precision
@@ -156,17 +167,67 @@ class BIT(sqltypes.TypeEngine):
PGBit = BIT
class UUID(sqltypes.TypeEngine):
+ """Postgresql UUID type.
+
+ Represents the UUID column type, interpreting
+ data either as natively returned by the DBAPI
+ or as Python uuid objects.
+
+ The UUID type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000.
+
+ """
__visit_name__ = 'UUID'
+
+ def __init__(self, as_uuid=False):
+ """Construct a UUID type.
+
+
+ :param as_uuid=False: if True, values will be interpreted
+ as Python uuid objects, converting to/from string via the
+ DBAPI.
+
+ """
+ if as_uuid and _python_UUID is None:
+ raise NotImplementedError(
+ "This version of Python does not support the native UUID type."
+ )
+ self.as_uuid = as_uuid
+
+ def bind_processor(self, dialect):
+ if self.as_uuid:
+ def process(value):
+ if value is not None:
+ value = str(value)
+ return value
+ return process
+ else:
+ return None
+
+ def result_processor(self, dialect, coltype):
+ if self.as_uuid:
+ def process(value):
+ if value is not None:
+ value = _python_UUID(value)
+ return value
+ return process
+ else:
+ return None
+
PGUuid = UUID
class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""Postgresql ARRAY type.
Represents values as Python lists.
+
+ The ARRAY type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000.
**Note:** be sure to read the notes for
- :class:`~sqlalchemy.types.MutableType` regarding ORM
- performance implications.
+ :class:`.MutableType` regarding ORM
+ performance implications. The :class:`.ARRAY` type's
+ mutability can be disabled using the "mutable" flag.
"""
__visit_name__ = 'ARRAY'
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 6af2cbd76..7b1d8e6a7 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -9,14 +9,16 @@ URLs are of the form
Unicode
-------
-pg8000 requires that the postgresql client encoding be configured in the postgresql.conf file
-in order to use encodings other than ascii. Set this value to the same value as
-the "encoding" parameter on create_engine(), usually "utf-8".
+pg8000 requires that the postgresql client encoding be
+configured in the postgresql.conf file in order to use encodings
+other than ascii. Set this value to the same value as the
+"encoding" parameter on create_engine(), usually "utf-8".
Interval
--------
-Passing data from/to the Interval type is not supported as of yet.
+Passing data from/to the Interval type is not supported as of
+yet.
"""
import decimal
@@ -27,26 +29,28 @@ from sqlalchemy import processors
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, \
PGCompiler, PGIdentifierPreparer, PGExecutionContext,\
- _DECIMAL_TYPES, _FLOAT_TYPES
+ _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES
class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
class PGExecutionContext_pg8000(PGExecutionContext):
pass
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 04b4e1fb7..7c5562064 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -96,8 +96,9 @@ from sqlalchemy.sql import expression
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler, \
- PGIdentifierPreparer, PGExecutionContext, \
- ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES
+ PGIdentifierPreparer, PGExecutionContext, \
+ ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
+ _INT_TYPES
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -111,19 +112,21 @@ class _PGNumeric(sqltypes.Numeric):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
class _PGEnum(ENUM):
def __init__(self, *arg, **kw):
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index 3ae81a977..8381e5ee1 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -999,6 +999,10 @@ def _as_declarative(cls, classname, dict_):
isinstance(obj, declarative_props)
):
table_args = cls.__table_args__
+ if not isinstance(table_args, (tuple, dict, type(None))):
+ raise exceptions.ArgumentError(
+ "__table_args__ value must be a tuple, "
+ "dict, or None")
if base is not cls:
inherited_table_args = True
elif class_mapped:
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 18031e15f..b51142909 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -206,6 +206,16 @@ def relationship(argument, secondary=None, **kwargs):
generally mutually exclusive with the use of the *secondary*
keyword argument.
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ many-to-one reference should be loaded when replaced, if
+ not already loaded. Normally, history tracking logic for
+ simple many-to-ones only needs to be aware of the "new"
+ value in order to perform a flush. This flag is available
+ for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute. (New in 0.6.6)
+
:param backref:
indicates the string name of a property to be placed on the related
mapper's class that will handle this relationship in the other
@@ -576,7 +586,7 @@ def column_property(*args, **kwargs):
"""Provide a column-level property for use with a Mapper.
Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the ``schema.Column`` element directly.
+ ``properties`` dictionary using the :class:`.Column` element directly.
Use this function when the given column is not directly present within the
mapper's selectable; examples include SQL expressions, functions, and
scalar SELECT queries.
@@ -587,6 +597,16 @@ def column_property(*args, **kwargs):
:param \*cols:
list of Column objects to be mapped.
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. Normally, history tracking logic for
+ simple non-primary-key scalar values only needs to be
+ aware of the "new" value in order to perform a flush. This
+ flag is available for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute. (new in 0.6.6)
+
:param comparator_factory: a class which extends
:class:`.ColumnProperty.Comparator` which provides custom SQL clause
generation for comparison operations.
@@ -690,7 +710,7 @@ def mapper(class_, local_table=None, *args, **params):
exist within the session, erasing any in-memory changes with
whatever information was loaded from the database. Usage of this
flag is highly discouraged; as an alternative, see the method
- `populate_existing()` on :class:`~sqlalchemy.orm.query.Query`.
+ :meth:`.Query.populate_existing`.
:param allow_null_pks: This flag is deprecated - this is stated as
allow_partial_pks which defaults to True.
@@ -699,7 +719,7 @@ def mapper(class_, local_table=None, *args, **params):
composite primary key with some NULL values should be considered as
possibly existing within the database. This affects whether a
mapper will assign an incoming row to an existing identity, as well
- as if session.merge() will check the database first for a
+ as if :meth:`.Session.merge` will check the database first for a
particular primary key value. A "partial primary key" can occur if
one has mapped to an OUTER JOIN, for example.
@@ -708,13 +728,14 @@ def mapper(class_, local_table=None, *args, **params):
that an instance will be fully saved before saving the next
instance, which includes inserting/updating all table rows
corresponding to the entity as well as calling all
- :class:`MapperExtension` methods corresponding to the save
+ :class:`.MapperExtension` methods corresponding to the save
operation.
:param column_prefix: A string which will be prepended to the `key`
- name of all Columns when creating column-based properties from the
- given Table. Does not affect explicitly specified column-based
- properties
+ name of all :class:`.Column` objects when creating
+ column-based properties from the
+ given :class:`.Table`. Does not affect explicitly specified
+ column-based properties
:param concrete: If True, indicates this mapper should use concrete
table inheritance with its parent mapper.
@@ -729,9 +750,9 @@ def mapper(class_, local_table=None, *args, **params):
to have fewer properties than its superclass, ``A``.
:param extension: A :class:`.MapperExtension` instance or
- list of :class:`~sqlalchemy.orm.interfaces.MapperExtension`
+ list of :class:`.MapperExtension`
instances which will be applied to all operations by this
- :class:`~sqlalchemy.orm.mapper.Mapper`.
+ :class:`.Mapper`.
:param include_properties: An inclusive list or set of string column
names to map. As of SQLAlchemy 0.6.4, this collection may also
@@ -741,13 +762,13 @@ def mapper(class_, local_table=None, *args, **params):
present in the mapped table but not named or present in this list
will not be automatically mapped. See also "exclude_properties".
- :param inherits: Another :class:`~sqlalchemy.orm.Mapper` for which
- this :class:`~sqlalchemy.orm.Mapper` will have an inheritance
+ :param inherits: Another :class:`.Mapper` for which
+ this :class:`.Mapper` will have an inheritance
relationship with.
:param inherit_condition: For joined table inheritance, a SQL
expression (constructed
- :class:`~sqlalchemy.expression.sql.ClauseElement`) which will
+ :class:`.ClauseElement`) which will
define how the two tables are joined; defaults to a natural join
between the two tables.
@@ -759,7 +780,7 @@ def mapper(class_, local_table=None, *args, **params):
the selection of instances, not their persistence. Any number of
non_primary mappers may be created for a particular class.
- :param order_by: A single :class:`Column` or list of :class:`Column`
+ :param order_by: A single :class:`.Column` or list of :class:`.Column`
objects for which selection operations should use as the default
ordering for entities. Defaults to the OID/ROWID of the table if
any, or the first primary key column of the table.
@@ -794,7 +815,7 @@ def mapper(class_, local_table=None, *args, **params):
this flag.
:param polymorphic_on: Used with mappers in an inheritance
- relationship, a ``Column`` which will identify the class/mapper
+ relationship, a :class:`.Column` which will identify the class/mapper
combination to be used with a particular row. Requires the
``polymorphic_identity`` value to be set for all mappers in the
inheritance hierarchy. The column specified by ``polymorphic_on``
@@ -804,23 +825,23 @@ def mapper(class_, local_table=None, *args, **params):
argument.
:param polymorphic_identity: A value which will be stored in the
- Column denoted by polymorphic_on, corresponding to the *class
- identity* of this mapper.
+ Column denoted by polymorphic_on, corresponding to the class
+ identity of this mapper.
:param properties: A dictionary mapping the string names of object
attributes to ``MapperProperty`` instances, which define the
persistence behavior of that attribute. Note that the columns in
the mapped table are automatically converted into
- ``ColumnProperty`` instances based on the `key` property of each
- ``Column`` (although they can be overridden using this dictionary).
+ ``ColumnProperty`` instances based on the ``key`` property of each
+ :class:`.Column` (although they can be overridden using this dictionary).
- :param primary_key: A list of ``Column`` objects which define the
- *primary key* to be used against this mapper's selectable unit.
- This is normally simply the primary key of the `local_table`, but
+ :param primary_key: A list of :class:`.Column` objects which define the
+ primary key to be used against this mapper's selectable unit.
+ This is normally simply the primary key of the ``local_table``, but
can be overridden here.
- :param version_id_col: A ``Column`` which must have an integer type
- that will be used to keep a running *version id* of mapped entities
+ :param version_id_col: A :class:`.Column` which must have an integer type
+ that will be used to keep a running version id of mapped entities
in the database. this is used during save operations to ensure that
no other thread or process has updated the instance during the
lifetime of the entity, else a :class:`StaleDataError` exception is
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index f6c7dd03d..1ef7de076 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -890,6 +890,10 @@ class AttributeExtension(object):
active_history = True
"""indicates that the set() method would like to receive the 'old' value,
even if it means firing lazy callables.
+
+ Note that ``active_history`` can also be set directly via
+ :func:`.column_property` and :func:`.relationship`.
+
"""
def append(self, state, value, initiator):
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index ffe78e2f3..194cb693a 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -107,7 +107,7 @@ class Mapper(object):
self.class_manager = None
- self.primary_key_argument = primary_key
+ self.primary_key_argument = util.to_list(primary_key)
self.non_primary = non_primary
if order_by is not False:
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 062eef04d..02e883de4 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -60,6 +60,7 @@ class ColumnProperty(StrategizedProperty):
self.__class__.Comparator)
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
+ self.active_history = kwargs.pop('active_history', False)
if 'doc' in kwargs:
self.doc = kwargs.pop('doc')
@@ -114,6 +115,7 @@ class ColumnProperty(StrategizedProperty):
return ColumnProperty(
deferred=self.deferred,
group=self.group,
+ active_history=self.active_history,
*self.columns)
def _getattr(self, state, dict_, column, passive=False):
@@ -184,6 +186,7 @@ class CompositeProperty(ColumnProperty):
deferred=self.deferred,
group=self.group,
composite_class=self.composite_class,
+ active_history=self.active_history,
*self.columns)
def do_init(self):
@@ -447,6 +450,7 @@ class RelationshipProperty(StrategizedProperty):
comparator_factory=None,
single_parent=False, innerjoin=False,
doc=None,
+ active_history=False,
cascade_backrefs=True,
load_on_pending=False,
strategy_class=None, _local_remote_pairs=None,
@@ -472,6 +476,7 @@ class RelationshipProperty(StrategizedProperty):
self.query_class = query_class
self.innerjoin = innerjoin
self.doc = doc
+ self.active_history = active_history
self.join_depth = join_depth
self.local_remote_pairs = _local_remote_pairs
self.extension = extension
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index ef9a509a4..798614daf 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1526,7 +1526,10 @@ class Query(object):
else:
return list(res)
else:
- return list(self[item:item+1])[0]
+ if item == -1:
+ return list(self)[-1]
+ else:
+ return list(self[item:item+1])[0]
@_generative(_no_statement_condition)
def slice(self, start, stop):
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 4e2021674..8413dd2b3 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -108,7 +108,8 @@ class ColumnLoader(LoaderStrategy):
self.is_class_level = True
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
- active_history = self.columns[0].primary_key
+ active_history = self.parent_property.active_history or \
+ self.columns[0].primary_key
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -163,8 +164,7 @@ class CompositeColumnLoader(ColumnLoader):
_register_attribute(self, mapper, useobject=False,
compare_function=compare,
copy_function=copy,
- mutable_scalars=True
- #active_history ?
+ mutable_scalars=True,
)
def create_row_processor(self, selectcontext, path, mapper,
@@ -398,6 +398,7 @@ class LazyLoader(AbstractRelationshipLoader):
uselist = self.parent_property.uselist,
typecallable = self.parent_property.collection_class,
active_history = \
+ self.parent_property.active_history or \
self.parent_property.direction is not \
interfaces.MANYTOONE or \
not self.use_get,
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index e73e26456..88dabe87c 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -26,6 +26,12 @@ def str_to_datetime_processor_factory(regexp, type_):
return type_(*map(int, rmatch(value).groups(0)))
return process
+def boolean_to_int(value):
+ if value is None:
+ return None
+ else:
+ return int(value)
+
try:
from sqlalchemy.cprocessors import UnicodeResultProcessor, \
DecimalResultProcessor, \
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 9f322d1eb..111f2314b 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -1682,6 +1682,12 @@ class Boolean(TypeEngine, SchemaType):
)
table.append_constraint(e)
+ def bind_processor(self, dialect):
+ if dialect.supports_native_boolean:
+ return None
+ else:
+ return processors.boolean_to_int
+
def result_processor(self, dialect, coltype):
if dialect.supports_native_boolean:
return None
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 04b9d3274..6627015b9 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -367,7 +367,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def server_version_info(self):
return (8, 2, 5)
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
# before connect, assume modern DB
@@ -384,7 +384,8 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
self.assert_compile(Unicode(50),"VARCHAR(50)",dialect=dialect)
self.assert_compile(UnicodeText(),"CLOB",dialect=dialect)
- dialect = oracle.dialect(implicit_returning=True)
+ dialect = oracle.dialect(implicit_returning=True,
+ dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
dialect.initialize(testing.db.connect())
assert dialect.implicit_returning
@@ -392,7 +393,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def test_default_flags(self):
"""test with no initialization or server version info"""
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
assert dialect._supports_char_length
assert dialect._supports_nchar
assert dialect.use_ansi
@@ -403,7 +404,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def test_ora10_flags(self):
def server_version_info(self):
return (10, 2, 5)
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
dialect.initialize(testing.db.connect())
assert dialect._supports_char_length
@@ -1043,7 +1044,40 @@ class TypesTest(TestBase, AssertsCompiledSQL):
finally:
t.drop(engine)
-
+class EuroNumericTest(TestBase):
+ """test the numeric output_type_handler when using non-US locale for NLS_LANG."""
+
+ __only_on__ = 'oracle+cx_oracle'
+
+ def setup(self):
+ self.old_nls_lang = os.environ.get('NLS_LANG', False)
+ os.environ['NLS_LANG'] = "GERMAN"
+ self.engine = testing_engine()
+
+ def teardown(self):
+ if self.old_nls_lang is not False:
+ os.environ['NLS_LANG'] = self.old_nls_lang
+ else:
+ del os.environ['NLS_LANG']
+ self.engine.dispose()
+
+ @testing.provide_metadata
+ def test_output_type_handler(self):
+ for stmt, exp, kw in [
+ ("SELECT 0.1 FROM DUAL", Decimal("0.1"), {}),
+ ("SELECT 15 FROM DUAL", 15, {}),
+ ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL", Decimal("15"), {}),
+ ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL", Decimal("0.1"), {}),
+ ("SELECT :num FROM DUAL", Decimal("2.5"), {'num':Decimal("2.5")})
+ ]:
+ test_exp = self.engine.scalar(stmt, **kw)
+ eq_(
+ test_exp,
+ exp
+ )
+ assert type(test_exp) is type(exp)
+
+
class DontReflectIOTTest(TestBase):
"""test that index overflow tables aren't included in
table_names."""
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index e20274aef..f3eb91ef2 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -456,7 +456,25 @@ class EnumTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert t2.c.value2.type.schema == 'test_schema'
finally:
metadata.drop_all()
+
+class NumericInterpretationTest(TestBase):
+
+
+ def test_numeric_codes(self):
+ from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
+ from decimal import Decimal
+ for dialect in (pg8000.dialect(), psycopg2.dialect()):
+
+ typ = Numeric().dialect_impl(dialect)
+ for code in base._INT_TYPES + base._FLOAT_TYPES + \
+ base._DECIMAL_TYPES:
+ proc = typ.result_processor(dialect, code)
+ val = 23.7
+ if proc is not None:
+ val = proc(val)
+ assert val in (23.7, Decimal("23.7"))
+
class InsertTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
@@ -1866,6 +1884,64 @@ class SpecialTypesTest(TestBase, ComparesTables):
assert t.c.plain_interval.type.precision is None
assert t.c.precision_interval.type.precision == 3
+class UUIDTest(TestBase):
+ """Test the bind/return values of the UUID type."""
+
+ __only_on__ = 'postgresql'
+
+ @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+ def test_uuid_string(self):
+ import uuid
+ self._test_round_trip(
+ Table('utable', MetaData(),
+ Column('data', postgresql.UUID())
+ ),
+ str(uuid.uuid4()),
+ str(uuid.uuid4())
+ )
+
+ @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+ def test_uuid_uuid(self):
+ import uuid
+ self._test_round_trip(
+ Table('utable', MetaData(),
+ Column('data', postgresql.UUID(as_uuid=True))
+ ),
+ uuid.uuid4(),
+ uuid.uuid4()
+ )
+
+ def test_no_uuid_available(self):
+ from sqlalchemy.dialects.postgresql import base
+ uuid_type = base._python_UUID
+ base._python_UUID = None
+ try:
+ assert_raises(
+ NotImplementedError,
+ postgresql.UUID, as_uuid=True
+ )
+ finally:
+ base._python_UUID = uuid_type
+
+ def setup(self):
+ self.conn = testing.db.connect()
+ trans = self.conn.begin()
+
+ def teardown(self):
+ self.conn.close()
+
+ def _test_round_trip(self, utable, value1, value2):
+ utable.create(self.conn)
+ self.conn.execute(utable.insert(), {'data':value1})
+ self.conn.execute(utable.insert(), {'data':value2})
+ r = self.conn.execute(
+ select([utable.c.data]).
+ where(utable.c.data != value1)
+ )
+ eq_(r.fetchone()[0], value2)
+ eq_(r.fetchone(), None)
+
+
class MatchTest(TestBase, AssertsCompiledSQL):
__only_on__ = 'postgresql'
diff --git a/test/ext/test_declarative.py b/test/ext/test_declarative.py
index 72e2edf30..7c8ab0016 100644
--- a/test/ext/test_declarative.py
+++ b/test/ext/test_declarative.py
@@ -631,7 +631,7 @@ class DeclarativeTest(DeclarativeTestBase):
'Mapper Mapper|User|users could not '
'assemble any primary key', define)
- def test_table_args(self):
+ def test_table_args_bad_format(self):
def err():
class Foo1(Base):
@@ -643,7 +643,30 @@ class DeclarativeTest(DeclarativeTestBase):
assert_raises_message(sa.exc.ArgumentError,
'Tuple form of __table_args__ is ', err)
+
+ def test_table_args_type(self):
+ def err():
+ class Foo1(Base):
+
+ __tablename__ = 'foo'
+ __table_args__ = ForeignKeyConstraint(['id'], ['foo.id'
+ ])
+ id = Column('id', Integer, primary_key=True)
+ assert_raises_message(sa.exc.ArgumentError,
+ '__table_args__ value must be a tuple, ', err)
+
+ def test_table_args_none(self):
+
+ class Foo2(Base):
+ __tablename__ = 'foo'
+ __table_args__ = None
+ id = Column('id', Integer, primary_key=True)
+
+ assert Foo2.__table__.kwargs == {}
+
+ def test_table_args_dict_format(self):
+
class Foo2(Base):
__tablename__ = 'foo'
@@ -652,6 +675,13 @@ class DeclarativeTest(DeclarativeTestBase):
assert Foo2.__table__.kwargs['mysql_engine'] == 'InnoDB'
+ def test_table_args_tuple_format(self):
+ class Foo2(Base):
+
+ __tablename__ = 'foo'
+ __table_args__ = {'mysql_engine': 'InnoDB'}
+ id = Column('id', Integer, primary_key=True)
+
class Bar(Base):
__tablename__ = 'bar'
diff --git a/test/orm/test_generative.py b/test/orm/test_generative.py
index 141fde9fc..31b7be8cc 100644
--- a/test/orm/test_generative.py
+++ b/test/orm/test_generative.py
@@ -48,6 +48,9 @@ class GenerativeQueryTest(_base.MappedTest):
orig = query.all()
assert query[1] == orig[1]
+ assert query[-4] == orig[-4]
+ assert query[-1] == orig[-1]
+
assert list(query[10:20]) == orig[10:20]
assert list(query[10:]) == orig[10:]
assert list(query[:10]) == orig[:10]
@@ -77,12 +80,12 @@ class GenerativeQueryTest(_base.MappedTest):
assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
# end Py2K
-
+
+ @testing.fails_if(lambda:testing.against('mysql+mysqldb') and
+ testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
+ "unknown incompatibility")
@testing.resolve_artifact_names
def test_aggregate_1(self):
- if (testing.against('mysql+mysqldb') and
- testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma')):
- return
query = create_session().query(func.sum(foo.c.bar))
assert query.filter(foo.c.bar<30).one() == (435,)
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index ef6ce6688..b1fb42c30 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -665,6 +665,17 @@ class MapperTest(_fixtures.FixtureTest):
None])
@testing.resolve_artifact_names
+ def test_scalar_pk_arg(self):
+ m1 = mapper(Item, items, primary_key=[items.c.id])
+ m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
+ m3 = mapper(User, users, primary_key=(users.c.id,))
+
+ assert m1.primary_key[0] is items.c.id
+ assert m2.primary_key[0] is keywords.c.id
+ assert m3.primary_key[0] is users.c.id
+
+
+ @testing.resolve_artifact_names
def test_custom_join(self):
"""select_from totally replace the FROM parameters."""
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 187c9e534..03efa0183 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -5,25 +5,15 @@ from sqlalchemy.test import testing
from sqlalchemy import Integer, String, ForeignKey, MetaData, and_
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, relation, \
- backref, create_session, compile_mappers, clear_mappers, sessionmaker
+ backref, create_session, compile_mappers, \
+ clear_mappers, sessionmaker, attributes,\
+ Session, composite, column_property
from sqlalchemy.test.testing import eq_, startswith_
from test.orm import _base, _fixtures
-class RelationshipTest(_base.MappedTest):
- """An extended topological sort test
-
- This is essentially an extension of the "dependency.py" topological sort
- test. In this test, a table is dependent on two other tables that are
- otherwise unrelated to each other. The dependency sort must ensure that
- this childmost table is below both parent tables in the outcome (a bug
- existed where this was not always the case).
-
- While the straight topological sort tests should expose this, since the
- sorting can be different due to subtle differences in program execution,
- this test case was exposing the bug whereas the simpler tests were not.
-
- """
+class DependencyTwoParentTest(_base.MappedTest):
+ """Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -32,18 +22,24 @@ class RelationshipTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ nullable=False),
Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ nullable=False),
Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
Column("name", String(128)))
@@ -62,10 +58,12 @@ class RelationshipTest(_base.MappedTest):
@testing.resolve_artifact_names
def setup_mappers(cls):
mapper(A, tbl_a, properties=dict(
- c_rows=relationship(C, cascade="all, delete-orphan", backref="a_row")))
+ c_rows=relationship(C, cascade="all, delete-orphan",
+ backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
- d_rows=relationship(D, cascade="all, delete-orphan", backref="c_row")))
+ d_rows=relationship(D, cascade="all, delete-orphan",
+ backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
@@ -101,8 +99,12 @@ class RelationshipTest(_base.MappedTest):
session.flush()
-class RelationshipTest2(_base.MappedTest):
- """The ultimate relationship() test:
+class CompositeSelfRefFKTest(_base.MappedTest):
+ """Tests a composite FK where, in
+ the relationship(), one col points
+ to itself in the same table.
+
+ this is a very unusual case::
company employee
---------- ----------
@@ -117,22 +119,13 @@ class RelationshipTest2(_base.MappedTest):
employee joins to its sub-employees
both on reports_to_id, *and on company_id to itself*.
- As of 0.5.5 we are making a slight behavioral change,
- such that the custom foreign_keys setting
- on the o2m side has to be explicitly
- unset on the backref m2o side - this to suit
- the vast majority of use cases where the backref()
- is to receive the same foreign_keys argument
- as the forwards reference. But we also
- have smartened the remote_side logic such that
- you don't even need the custom fks setting.
-
"""
@classmethod
def define_tables(cls, metadata):
Table('company_t', metadata,
- Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', sa.Unicode(30)))
Table('employee_t', metadata,
@@ -163,7 +156,10 @@ class RelationshipTest2(_base.MappedTest):
def test_explicit(self):
mapper(Company, company_t)
mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, primaryjoin=employee_t.c.company_id==company_t.c.company_id, backref='employees'),
+ 'company':relationship(Company,
+ primaryjoin=employee_t.c.company_id==
+ company_t.c.company_id,
+ backref='employees'),
'reports_to':relationship(Employee, primaryjoin=
sa.and_(
employee_t.c.emp_id==employee_t.c.reports_to_id,
@@ -244,10 +240,12 @@ class RelationshipTest2(_base.MappedTest):
test_e5 = sess.query(Employee).get([c2.company_id, e5.emp_id])
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
- assert sess.query(Employee).get([c1.company_id, 3]).reports_to.name == 'emp1'
- assert sess.query(Employee).get([c2.company_id, 3]).reports_to.name == 'emp5'
+ assert sess.query(Employee).\
+ get([c1.company_id, 3]).reports_to.name == 'emp1'
+ assert sess.query(Employee).\
+ get([c2.company_id, 3]).reports_to.name == 'emp5'
-class RelationshipTest3(_base.MappedTest):
+class ComplexPostUpdateTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("jobs", metadata,
@@ -311,7 +309,8 @@ class RelationshipTest3(_base.MappedTest):
comment.content = u'some content'
return self.currentversion
def add_comment(self):
- nextnum = max([-1] + [c.comment_id for c in self.comments]) + 1
+ nextnum = max([-1] +
+ [c.comment_id for c in self.comments]) + 1
newcomment = PageComment()
newcomment.comment_id = nextnum
self.comments.append(newcomment)
@@ -340,7 +339,7 @@ class RelationshipTest3(_base.MappedTest):
PageVersion,
cascade="all, delete-orphan",
primaryjoin=sa.and_(pages.c.jobno==pageversions.c.jobno,
- pages.c.pagename==pageversions.c.pagename),
+ pages.c.pagename==pageversions.c.pagename),
order_by=pageversions.c.version,
backref=backref('page',lazy='joined')
)})
@@ -348,7 +347,7 @@ class RelationshipTest3(_base.MappedTest):
'page': relationship(
Page,
primaryjoin=sa.and_(pages.c.jobno==pagecomments.c.jobno,
- pages.c.pagename==pagecomments.c.pagename),
+ pages.c.pagename==pagecomments.c.pagename),
backref=backref("comments",
cascade="all, delete-orphan",
order_by=pagecomments.c.comment_id))})
@@ -389,13 +388,14 @@ class RelationshipTest3(_base.MappedTest):
s.delete(j)
s.flush()
-class RelationshipTest4(_base.MappedTest):
+class FKsAsPksTest(_base.MappedTest):
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True, test_needs_autoincrement=True),
+ Column("id",Integer,primary_key=True,
+ test_needs_autoincrement=True),
Column("foo",Integer,),
test_needs_fk=True)
@@ -413,7 +413,8 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_onetoone_switch(self):
- """test that active history is enabled on a one-to-many/one that has use_get==True"""
+ """test that active history is enabled on a
+ one-to-many/one that has use_get==True"""
mapper(A, tableA, properties={
'b':relationship(B, cascade="all,delete-orphan", uselist=False)})
@@ -502,7 +503,8 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_delete_cascade_BtoA(self):
- """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ """No 'blank the PK' error when the child is to
+ be deleted as part of a cascade"""
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
@@ -527,7 +529,9 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_delete_cascade_AtoB(self):
- """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ """No 'blank the PK' error when the child is to
+ be deleted as part of a cascade"""
+
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
@@ -590,19 +594,25 @@ class RelationshipTest4(_base.MappedTest):
assert a1 not in sess
assert b1 not in sess
-class RelationshipToUniqueTest(_base.MappedTest):
- """test a relationship based on a primary join against a unique non-pk column"""
+class UniqueColReferenceSwitchTest(_base.MappedTest):
+ """test a relationship based on a primary
+ join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False, unique=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("ident", String(10), nullable=False,
+ unique=True),
)
Table("table_b", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("a_ident", String(10), ForeignKey('table_a.ident'), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
+ nullable=False),
)
@classmethod
@@ -632,7 +642,7 @@ class RelationshipToUniqueTest(_base.MappedTest):
session.delete(a1)
session.flush()
-class RelationshipTest5(_base.MappedTest):
+class RelationshipToSelectableTest(_base.MappedTest):
"""Test a map to a select that relates to a map to the table."""
@classmethod
@@ -671,7 +681,8 @@ class RelationshipTest5(_base.MappedTest):
order_by=sa.asc(items.c.id),
primaryjoin=sa.and_(
container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==items.c.policyEffDate,
+ container_select.c.policyEffDate==
+ items.c.policyEffDate,
container_select.c.type==items.c.type),
foreign_keys=[
items.c.policyNum,
@@ -697,7 +708,7 @@ class RelationshipTest5(_base.MappedTest):
for old, new in zip(con.lineItems, newcon.lineItems):
eq_(old.id, new.id)
-class RelationshipTest6(_base.MappedTest):
+class FKEquatedToConstantTest(_base.MappedTest):
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
@@ -706,12 +717,14 @@ class RelationshipTest6(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('tags', metadata, Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('tags', metadata, Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("data", String(50)),
)
Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('tagid', Integer),
Column("data", String(50)),
)
@@ -742,7 +755,10 @@ class RelationshipTest6(_base.MappedTest):
sess.expunge_all()
# relationship works
- eq_(sess.query(Tag).all(), [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])])
+ eq_(
+ sess.query(Tag).all(),
+ [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])]
+ )
# both TagInstances were persisted
eq_(
@@ -755,11 +771,13 @@ class BackrefPropagatesForwardsArgs(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50))
)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', Integer),
Column('email', String(50))
)
@@ -791,24 +809,28 @@ class BackrefPropagatesForwardsArgs(_base.MappedTest):
])
class AmbiguousJoinInterpretedAsSelfRef(_base.MappedTest):
- """test ambiguous joins due to FKs on both sides treated as self-referential.
+ """test ambiguous joins due to FKs on both sides treated as
+ self-referential.
- this mapping is very similar to that of test/orm/inheritance/query.py
- SelfReferentialTestJoinedToBase , except that inheritance is not used
- here.
+ this mapping is very similar to that of
+ test/orm/inheritance/query.py
+ SelfReferentialTestJoinedToBase , except that inheritance is
+ not used here.
"""
@classmethod
def define_tables(cls, metadata):
subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('dummy', String(10)) # to appease older sqlite version
)
address_table = Table('address',
metadata,
- Column('subscriber_id', Integer, ForeignKey('subscriber.id'), primary_key=True),
+ Column('subscriber_id', Integer,
+ ForeignKey('subscriber.id'), primary_key=True),
Column('type', String(1), primary_key=True),
)
@@ -816,7 +838,8 @@ class AmbiguousJoinInterpretedAsSelfRef(_base.MappedTest):
@testing.resolve_artifact_names
def setup_mappers(cls):
subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id, address.c.type.in_(['A', 'B', 'C'])))
+ and_(address.c.subscriber_id==subscriber.c.id,
+ address.c.type.in_(['A', 'B', 'C'])))
class Address(_base.ComparableEntity):
pass
@@ -918,8 +941,10 @@ class ManualBackrefTest(_fixtures.FixtureTest):
})
assert_raises_message(sa.exc.ArgumentError,
- r"reverse_property 'dingaling' on relationship User.addresses references "
- "relationship Address.dingaling, which does not reference mapper Mapper\|User\|users",
+ r"reverse_property 'dingaling' on relationship "
+ "User.addresses references "
+ "relationship Address.dingaling, which does not "
+ "reference mapper Mapper\|User\|users",
compile_mappers)
class JoinConditionErrorTest(testing.TestBase):
@@ -966,7 +991,8 @@ class JoinConditionErrorTest(testing.TestBase):
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2, primaryjoin=t1.join(t2))})
+ mapper(C1, t1, properties={'c2':relationship(C2,
+ primaryjoin=t1.join(t2))})
mapper(C2, t2)
assert_raises(sa.exc.ArgumentError, compile_mappers)
@@ -996,7 +1022,9 @@ class JoinConditionErrorTest(testing.TestBase):
assert_raises_message(
sa.exc.ArgumentError,
- "Column-based expression object expected for argument '%s'; got: '%s', type %r" % (argname, arg[0], type(arg[0])),
+ "Column-based expression object expected "
+ "for argument '%s'; got: '%s', type %r" %
+ (argname, arg[0], type(arg[0])),
compile_mappers)
@@ -1053,23 +1081,28 @@ class JoinConditionErrorTest(testing.TestBase):
clear_mappers()
class TypeMatchTest(_base.MappedTest):
- """test errors raised when trying to add items whose type is not handled by a relationship"""
+ """test errors raised when trying to add items
+ whose type is not handled by a relationship"""
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('aid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('aid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('bid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
Table("c", metadata,
- Column('cid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('cid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('data', String(30)))
Table("d", metadata,
- Column('did', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('did', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
@@ -1115,7 +1148,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_o2m_nopoly_onflush(self):
@@ -1136,7 +1170,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_m2o_nopoly_onflush(self):
@@ -1153,7 +1188,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_m2o_oncascade(self):
@@ -1168,7 +1204,8 @@ class TypeMatchTest(_base.MappedTest):
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type", sess.add, d1)
+ "doesn't handle objects of type",
+ sess.add, d1)
class TypedAssociationTable(_base.MappedTest):
@@ -1224,10 +1261,12 @@ class ViewOnlyM2MBackrefTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
)
Table("t1t2", metadata,
@@ -1241,7 +1280,8 @@ class ViewOnlyM2MBackrefTest(_base.MappedTest):
class B(_base.ComparableEntity):pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2, backref=backref('as_', viewonly=True))
+ 'bs':relationship(B, secondary=t1t2,
+ backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@@ -1264,14 +1304,17 @@ class ViewOnlyOverlappingNames(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id', Integer, ForeignKey('t2.id')))
@@ -1324,14 +1367,17 @@ class ViewOnlyUniqueNames(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t1id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t3id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
@@ -1505,10 +1551,12 @@ class ViewOnlyRepeatedLocalColumn(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
- Table('bars', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('bars', metadata, Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
Column('data', String(50)))
@@ -1553,14 +1601,17 @@ class ViewOnlyComplexJoin(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2tot3', metadata,
Column('t2id', Integer, ForeignKey('t2.id')),
@@ -1624,10 +1675,12 @@ class ExplicitLocalRemoteTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True, test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', String(50)))
@@ -1777,19 +1830,25 @@ class InvalidRemoteSideTest(_base.MappedTest):
't1s':relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ assert_raises_message(sa.exc.ArgumentError,
+ "T1.t1s and back-reference T1.parent are "
+ "both of the same direction <symbol 'ONETOMANY>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_m2o_backref(self):
mapper(T1, t1, properties={
- 't1s':relationship(T1, backref=backref('parent', remote_side=t1.c.id), remote_side=t1.c.id)
+ 't1s':relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
+ remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ assert_raises_message(sa.exc.ArgumentError,
+ "T1.t1s and back-reference T1.parent are "
+ "both of the same direction <symbol 'MANYTOONE>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_o2m_explicit(self):
@@ -1800,20 +1859,24 @@ class InvalidRemoteSideTest(_base.MappedTest):
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ "both of the same direction <symbol 'ONETOMANY>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_m2o_explicit(self):
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent', remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s', remote_side=t1.c.id)
+ 't1s':relationship(T1, back_populates='parent',
+ remote_side=t1.c.id),
+ 'parent':relationship(T1, back_populates='t1s',
+ remote_side=t1.c.id)
})
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ "both of the same direction <symbol 'MANYTOONE>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
class InvalidRelationshipEscalationTest(_base.MappedTest):
@@ -1872,7 +1935,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction "
+ "for primaryjoin condition",
sa.orm.compile_mappers)
@testing.resolve_artifact_names
@@ -1953,7 +2017,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@testing.resolve_artifact_names
@@ -2036,13 +2101,14 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
+ primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
mapper(Bar, bars_with_fks)
sa.orm.compile_mappers()
@@ -2054,7 +2120,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@@ -2067,7 +2134,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@@ -2148,9 +2216,12 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id==foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid==bars.c.id)})
+ 'bars': relationship(Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid==
+ bars.c.id)})
mapper(Bar, bars)
assert_raises_message(sa.exc.SAWarning,
@@ -2188,9 +2259,12 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id==foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid==bars.c.id)})
+ 'bars': relationship(Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid==
+ bars.c.id)})
mapper(Bar, bars)
sa.orm.compile_mappers()
eq_(
@@ -2214,7 +2288,8 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for "
+ "primaryjoin condition",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
@@ -2226,7 +2301,8 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
mapper(Bar, bars)
assert_raises_message(
sa.exc.ArgumentError,
- "Could not locate any equated, locally mapped column pairs for primaryjoin condition ",
+ "Could not locate any equated, locally mapped column pairs for "
+ "primaryjoin condition ",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
@@ -2279,8 +2355,71 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
"Could not locate any equated, locally mapped column pairs for "
"secondaryjoin condition", sa.orm.compile_mappers)
+class ActiveHistoryFlagTest(_fixtures.FixtureTest):
+ run_inserts = None
+ run_deletes = None
+
+ def _test_attribute(self, obj, attrname, newvalue):
+ sess = Session()
+ sess.add(obj)
+ oldvalue = getattr(obj, attrname)
+ sess.commit()
+
+ # expired
+ assert attrname not in obj.__dict__
+
+ setattr(obj, attrname, newvalue)
+ eq_(
+ attributes.get_history(obj, attrname),
+ ([newvalue,], (), [oldvalue,])
+ )
+
+ @testing.resolve_artifact_names
+ def test_column_property_flag(self):
+ mapper(User, users, properties={
+ 'name':column_property(users.c.name, active_history=True)
+ })
+ u1 = User(name='jack')
+ self._test_attribute(u1, 'name', 'ed')
+
+ @testing.resolve_artifact_names
+ def test_relationship_property_flag(self):
+ mapper(Address, addresses, properties={
+ 'user':relationship(User, active_history=True)
+ })
+ mapper(User, users)
+ u1 = User(name='jack')
+ u2 = User(name='ed')
+ a1 = Address(email_address='a1', user=u1)
+ self._test_attribute(a1, 'user', u2)
+
+ @testing.resolve_artifact_names
+ def test_composite_property_flag(self):
+ # active_history is implicit for composites
+ # right now, no flag needed
+ class MyComposite(object):
+ def __init__(self, description, isopen):
+ self.description = description
+ self.isopen = isopen
+ def __composite_values__(self):
+ return [self.description, self.isopen]
+ def __eq__(self, other):
+ return isinstance(other, MyComposite) and \
+ other.description == self.description
+ mapper(Order, orders, properties={
+ 'composite':composite(
+ MyComposite,
+ orders.c.description,
+ orders.c.isopen)
+ })
+ o1 = Order(composite=MyComposite('foo', 1))
+ self._test_attribute(o1, "composite", MyComposite('bar', 1))
+
+
class RelationDeprecationTest(_base.MappedTest):
+ """test usage of the old 'relation' function."""
+
run_inserts = 'once'
run_deletes = None