diff options
Diffstat (limited to 'lib/sqlalchemy/dialects/postgresql')
 lib/sqlalchemy/dialects/postgresql/array.py    |  40
 lib/sqlalchemy/dialects/postgresql/base.py     | 141
 lib/sqlalchemy/dialects/postgresql/dml.py      |  13
 lib/sqlalchemy/dialects/postgresql/ext.py      |  20
 lib/sqlalchemy/dialects/postgresql/hstore.py   |   2
 lib/sqlalchemy/dialects/postgresql/json.py     |  48
 lib/sqlalchemy/dialects/postgresql/psycopg2.py |  35
7 files changed, 170 insertions(+), 129 deletions(-)
diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index 9f0f676cd..a3537ba60 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -25,7 +25,7 @@ def Any(other, arrexpr, operator=operators.eq): .. seealso:: - :func:`.expression.any_` + :func:`_expression.any_` """ @@ -39,7 +39,7 @@ def All(other, arrexpr, operator=operators.eq): .. seealso:: - :func:`.expression.all_` + :func:`_expression.all_` """ @@ -68,14 +68,16 @@ class array(expression.Tuple): ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1 An instance of :class:`.array` will always have the datatype - :class:`.ARRAY`. The "inner" type of the array is inferred from + :class:`_types.ARRAY`. The "inner" type of the array is inferred from the values present, unless the ``type_`` keyword argument is passed:: array(['foo', 'bar'], type_=CHAR) Multidimensional arrays are produced by nesting :class:`.array` constructs. - The dimensionality of the final :class:`.ARRAY` type is calculated by - recursively adding the dimensions of the inner :class:`.ARRAY` type:: + The dimensionality of the final :class:`_types.ARRAY` + type is calculated by + recursively adding the dimensions of the inner :class:`_types.ARRAY` + type:: stmt = select([ array([ @@ -93,7 +95,7 @@ class array(expression.Tuple): .. seealso:: - :class:`.postgresql.ARRAY` + :class:`_postgresql.ARRAY` """ @@ -150,11 +152,11 @@ class ARRAY(sqltypes.ARRAY): """PostgreSQL ARRAY type. - .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now - a subclass of the core :class:`.types.ARRAY` type. + .. versionchanged:: 1.1 The :class:`_postgresql.ARRAY` type is now + a subclass of the core :class:`_types.ARRAY` type. 
- The :class:`.postgresql.ARRAY` type is constructed in the same way - as the core :class:`.types.ARRAY` type; a member type is required, and a + The :class:`_postgresql.ARRAY` type is constructed in the same way + as the core :class:`_types.ARRAY` type; a member type is required, and a number of dimensions is recommended if the type is to be used for more than one dimension:: @@ -164,11 +166,12 @@ class ARRAY(sqltypes.ARRAY): Column("data", postgresql.ARRAY(Integer, dimensions=2)) ) - The :class:`.postgresql.ARRAY` type provides all operations defined on the - core :class:`.types.ARRAY` type, including support for "dimensions", + The :class:`_postgresql.ARRAY` type provides all operations defined on the + core :class:`_types.ARRAY` type, including support for "dimensions", indexed access, and simple matching such as :meth:`.types.ARRAY.Comparator.any` and - :meth:`.types.ARRAY.Comparator.all`. :class:`.postgresql.ARRAY` class also + :meth:`.types.ARRAY.Comparator.all`. :class:`_postgresql.ARRAY` + class also provides PostgreSQL-specific methods for containment operations, including :meth:`.postgresql.ARRAY.Comparator.contains` :meth:`.postgresql.ARRAY.Comparator.contained_by`, and @@ -176,24 +179,25 @@ class ARRAY(sqltypes.ARRAY): mytable.c.data.contains([1, 2]) - The :class:`.postgresql.ARRAY` type may not be supported on all + The :class:`_postgresql.ARRAY` type may not be supported on all PostgreSQL DBAPIs; it is currently known to work on psycopg2 only. - Additionally, the :class:`.postgresql.ARRAY` type does not work directly in + Additionally, the :class:`_postgresql.ARRAY` + type does not work directly in conjunction with the :class:`.ENUM` type. For a workaround, see the special type at :ref:`postgresql_array_of_enum`. .. seealso:: - :class:`.types.ARRAY` - base array type + :class:`_types.ARRAY` - base array type - :class:`.postgresql.array` - produces a literal array value. + :class:`_postgresql.array` - produces a literal array value. 
""" class Comparator(sqltypes.ARRAY.Comparator): - """Define comparison operations for :class:`.ARRAY`. + """Define comparison operations for :class:`_types.ARRAY`. Note that these operations are in addition to those provided by the base :class:`.types.ARRAY.Comparator` class, including diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index cb41a8f65..670de4ebf 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -86,7 +86,7 @@ All PostgreSQL dialects support setting of transaction isolation level both via a dialect-specific parameter :paramref:`.create_engine.isolation_level` accepted by :func:`.create_engine`, as well as the :paramref:`.Connection.execution_options.isolation_level` -argument as passed to :meth:`.Connection.execution_options`. +argument as passed to :meth:`_engine.Connection.execution_options`. When using a non-psycopg2 dialect, this feature works by issuing the command ``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL <level>`` for each new connection. For the special AUTOCOMMIT isolation level, @@ -129,11 +129,13 @@ Remote-Schema Table Introspection and PostgreSQL search_path name schemas **other** than ``public`` explicitly within ``Table`` definitions. The PostgreSQL dialect can reflect tables from any schema. The -:paramref:`.Table.schema` argument, or alternatively the +:paramref:`_schema.Table.schema` argument, or alternatively the :paramref:`.MetaData.reflect.schema` argument determines which schema will -be searched for the table or tables. The reflected :class:`.Table` objects +be searched for the table or tables. The reflected :class:`_schema.Table` +objects will in all cases retain this ``.schema`` attribute as was specified. 
-However, with regards to tables which these :class:`.Table` objects refer to +However, with regards to tables which these :class:`_schema.Table` +objects refer to via foreign key constraint, a decision must be made as to how the ``.schema`` is represented in those remote tables, in the case where that remote schema name is also a member of the current @@ -205,7 +207,8 @@ reflection process as follows:: ... <sqlalchemy.engine.result.ResultProxy object at 0x101612ed0> -The above process would deliver to the :attr:`.MetaData.tables` collection +The above process would deliver to the :attr:`_schema.MetaData.tables` +collection ``referred`` table named **without** the schema:: >>> meta.tables['referred'].schema is None @@ -214,8 +217,8 @@ The above process would deliver to the :attr:`.MetaData.tables` collection To alter the behavior of reflection such that the referred schema is maintained regardless of the ``search_path`` setting, use the ``postgresql_ignore_search_path`` option, which can be specified as a -dialect-specific argument to both :class:`.Table` as well as -:meth:`.MetaData.reflect`:: +dialect-specific argument to both :class:`_schema.Table` as well as +:meth:`_schema.MetaData.reflect`:: >>> with engine.connect() as conn: ... conn.execute(text("SET search_path TO test_schema, public")) @@ -239,7 +242,7 @@ We will now have ``test_schema.referred`` stored as schema-qualified:: you just stick to the simplest use pattern: leave the ``search_path`` set to its default of ``public`` only, never refer to the name ``public`` as an explicit schema name otherwise, and refer to all other schema names - explicitly when building up a :class:`.Table` object. The options + explicitly when building up a :class:`_schema.Table` object. The options described here are only for those users who can't, or prefer not to, stay within these guidelines. @@ -251,8 +254,8 @@ which is in the ``public`` (i.e. default) schema will always have the ``.schema`` attribute set to ``None``. .. 
versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path`` - dialect-level option accepted by :class:`.Table` and - :meth:`.MetaData.reflect`. + dialect-level option accepted by :class:`_schema.Table` and + :meth:`_schema.MetaData.reflect`. .. seealso:: @@ -304,7 +307,7 @@ or they may be *inferred* by stating the columns and conditions that comprise the indexes. SQLAlchemy provides ``ON CONFLICT`` support via the PostgreSQL-specific -:func:`.postgresql.insert()` function, which provides +:func:`_postgresql.insert()` function, which provides the generative methods :meth:`~.postgresql.Insert.on_conflict_do_update` and :meth:`~.postgresql.Insert.on_conflict_do_nothing`:: @@ -331,7 +334,7 @@ Both methods supply the "target" of the conflict using either the named constraint or by column inference: * The :paramref:`.Insert.on_conflict_do_update.index_elements` argument - specifies a sequence containing string column names, :class:`.Column` + specifies a sequence containing string column names, :class:`_schema.Column` objects, and/or SQL expression elements, which would identify a unique index:: @@ -381,8 +384,9 @@ named constraint or by column inference: constraint is unnamed, then inference will be used, where the expressions and optional WHERE clause of the constraint will be spelled out in the construct. This use is especially convenient - to refer to the named or unnamed primary key of a :class:`.Table` using the - :attr:`.Table.primary_key` attribute:: + to refer to the named or unnamed primary key of a :class:`_schema.Table` + using the + :attr:`_schema.Table.primary_key` attribute:: do_update_stmt = insert_stmt.on_conflict_do_update( constraint=my_table.primary_key, @@ -407,17 +411,19 @@ for UPDATE:: .. warning:: - The :meth:`.Insert.on_conflict_do_update` method does **not** take into + The :meth:`_expression.Insert.on_conflict_do_update` + method does **not** take into account Python-side default UPDATE values or generation functions, e.g. 
- those specified using :paramref:`.Column.onupdate`. + those specified using :paramref:`_schema.Column.onupdate`. These values will not be exercised for an ON CONFLICT style of UPDATE, unless they are manually specified in the :paramref:`.Insert.on_conflict_do_update.set_` dictionary. In order to refer to the proposed insertion row, the special alias :attr:`~.postgresql.Insert.excluded` is available as an attribute on -the :class:`.postgresql.Insert` object; this object is a -:class:`.ColumnCollection` which alias contains all columns of the target +the :class:`_postgresql.Insert` object; this object is a +:class:`_expression.ColumnCollection` +which alias contains all columns of the target table:: from sqlalchemy.dialects.postgresql import insert @@ -432,7 +438,7 @@ table:: ) conn.execute(do_update_stmt) -The :meth:`.Insert.on_conflict_do_update` method also accepts +The :meth:`_expression.Insert.on_conflict_do_update` method also accepts a WHERE clause using the :paramref:`.Insert.on_conflict_do_update.where` parameter, which will limit those rows which receive an UPDATE:: @@ -484,7 +490,8 @@ Full Text Search ---------------- SQLAlchemy makes available the PostgreSQL ``@@`` operator via the -:meth:`.ColumnElement.match` method on any textual column expression. +:meth:`_expression.ColumnElement.match` +method on any textual column expression. 
On a PostgreSQL dialect, an expression like the following:: select([sometable.c.text.match("search string")]) @@ -505,7 +512,7 @@ Emits the equivalent of:: SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat') -The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST:: +The :class:`_postgresql.TSVECTOR` type can provide for explicit CAST:: from sqlalchemy.dialects.postgresql import TSVECTOR from sqlalchemy import select, cast @@ -613,8 +620,9 @@ The :class:`.Index` construct allows these to be specified via the }) Note that the keys in the ``postgresql_ops`` dictionary are the "key" name of -the :class:`.Column`, i.e. the name used to access it from the ``.c`` -collection of :class:`.Table`, which can be configured to be different than +the :class:`_schema.Column`, i.e. the name used to access it from the ``.c`` +collection of :class:`_schema.Table`, +which can be configured to be different than the actual name of the column as expressed in the database. If ``postgresql_ops`` is to be used against a complex SQL expression such @@ -666,7 +674,7 @@ The tablespace can be specified on :class:`.Index` using the .. versionadded:: 1.1 -Note that the same option is available on :class:`.Table` as well. +Note that the same option is available on :class:`_schema.Table` as well. .. _postgresql_index_concurrently: @@ -722,25 +730,30 @@ PostgreSQL Index Reflection The PostgreSQL database creates a UNIQUE INDEX implicitly whenever the UNIQUE CONSTRAINT construct is used. When inspecting a table using -:class:`.Inspector`, the :meth:`.Inspector.get_indexes` -and the :meth:`.Inspector.get_unique_constraints` will report on these +:class:`_reflection.Inspector`, the :meth:`_reflection.Inspector.get_indexes` +and the :meth:`_reflection.Inspector.get_unique_constraints` +will report on these two constructs distinctly; in the case of the index, the key ``duplicates_constraint`` will be present in the index entry if it is detected as mirroring a constraint. 
When performing reflection using ``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned -in :attr:`.Table.indexes` when it is detected as mirroring a -:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection. +in :attr:`_schema.Table.indexes` when it is detected as mirroring a +:class:`.UniqueConstraint` in the :attr:`_schema.Table.constraints` collection +. -.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes - :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints` +.. versionchanged:: 1.0.0 - :class:`_schema.Table` reflection now includes + :class:`.UniqueConstraint` objects present in the + :attr:`_schema.Table.constraints` collection; the PostgreSQL backend will no longer include a "mirrored" - :class:`.Index` construct in :attr:`.Table.indexes` if it is detected + :class:`.Index` construct in :attr:`_schema.Table.indexes` + if it is detected as corresponding to a unique constraint. Special Reflection Options -------------------------- -The :class:`.Inspector` used for the PostgreSQL backend is an instance +The :class:`_reflection.Inspector` +used for the PostgreSQL backend is an instance of :class:`.PGInspector`, which offers additional methods:: from sqlalchemy import create_engine, inspect @@ -759,7 +772,7 @@ PostgreSQL Table Options ------------------------ Several options for CREATE TABLE are supported directly by the PostgreSQL -dialect in conjunction with the :class:`.Table` construct: +dialect in conjunction with the :class:`_schema.Table` construct: * ``TABLESPACE``:: @@ -805,13 +818,13 @@ ARRAY Types The PostgreSQL dialect supports arrays, both as multidimensional column types as well as array literals: -* :class:`.postgresql.ARRAY` - ARRAY datatype +* :class:`_postgresql.ARRAY` - ARRAY datatype -* :class:`.postgresql.array` - array literal +* :class:`_postgresql.array` - array literal -* :func:`.postgresql.array_agg` - ARRAY_AGG SQL function +* :func:`_postgresql.array_agg` - ARRAY_AGG 
SQL function -* :class:`.postgresql.aggregate_order_by` - helper for PG's ORDER BY aggregate +* :class:`_postgresql.aggregate_order_by` - helper for PG's ORDER BY aggregate function syntax. JSON Types @@ -821,18 +834,18 @@ The PostgreSQL dialect supports both JSON and JSONB datatypes, including psycopg2's native support and support for all of PostgreSQL's special operators: -* :class:`.postgresql.JSON` +* :class:`_postgresql.JSON` -* :class:`.postgresql.JSONB` +* :class:`_postgresql.JSONB` HSTORE Type ----------- The PostgreSQL HSTORE type as well as hstore literals are supported: -* :class:`.postgresql.HSTORE` - HSTORE datatype +* :class:`_postgresql.HSTORE` - HSTORE datatype -* :class:`.postgresql.hstore` - hstore literal +* :class:`_postgresql.hstore` - hstore literal ENUM Types ---------- @@ -843,7 +856,7 @@ complexity on the SQLAlchemy side in terms of when this type should be CREATED and DROPPED. The type object is also an independently reflectable entity. The following sections should be consulted: -* :class:`.postgresql.ENUM` - DDL and typing support for ENUM. +* :class:`_postgresql.ENUM` - DDL and typing support for ENUM. * :meth:`.PGInspector.get_enums` - retrieve a listing of current ENUM types @@ -858,7 +871,7 @@ Using ENUM with ARRAY The combination of ENUM and ARRAY is not directly supported by backend DBAPIs at this time. In order to send and receive an ARRAY of ENUM, use the following workaround type, which decorates the -:class:`.postgresql.ARRAY` datatype. +:class:`_postgresql.ARRAY` datatype. .. sourcecode:: python @@ -1268,7 +1281,7 @@ PGUuid = UUID class TSVECTOR(sqltypes.TypeEngine): - """The :class:`.postgresql.TSVECTOR` type implements the PostgreSQL + """The :class:`_postgresql.TSVECTOR` type implements the PostgreSQL text search type TSVECTOR. It can be used to do full text queries on natural language @@ -1289,12 +1302,12 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): """PostgreSQL ENUM type. 
- This is a subclass of :class:`.types.Enum` which includes + This is a subclass of :class:`_types.Enum` which includes support for PG's ``CREATE TYPE`` and ``DROP TYPE``. - When the builtin type :class:`.types.Enum` is used and the + When the builtin type :class:`_types.Enum` is used and the :paramref:`.Enum.native_enum` flag is left at its default of - True, the PostgreSQL backend will use a :class:`.postgresql.ENUM` + True, the PostgreSQL backend will use a :class:`_postgresql.ENUM` type as the implementation, so the special create/drop rules will be used. @@ -1303,9 +1316,10 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): parent table, in that it may be "owned" by just a single table, or may be shared among many tables. - When using :class:`.types.Enum` or :class:`.postgresql.ENUM` + When using :class:`_types.Enum` or :class:`_postgresql.ENUM` in an "inline" fashion, the ``CREATE TYPE`` and ``DROP TYPE`` is emitted - corresponding to when the :meth:`.Table.create` and :meth:`.Table.drop` + corresponding to when the :meth:`_schema.Table.create` and + :meth:`_schema.Table.drop` methods are called:: table = Table('sometable', metadata, @@ -1316,9 +1330,9 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): table.drop(engine) # will emit DROP TABLE and DROP ENUM To use a common enumerated type between multiple tables, the best - practice is to declare the :class:`.types.Enum` or - :class:`.postgresql.ENUM` independently, and associate it with the - :class:`.MetaData` object itself:: + practice is to declare the :class:`_types.Enum` or + :class:`_postgresql.ENUM` independently, and associate it with the + :class:`_schema.MetaData` object itself:: my_enum = ENUM('a', 'b', 'c', name='myenum', metadata=metadata) @@ -1353,7 +1367,7 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): my_enum.create(engine) my_enum.drop(engine) - .. versionchanged:: 1.0.0 The PostgreSQL :class:`.postgresql.ENUM` type + .. 
versionchanged:: 1.0.0 The PostgreSQL :class:`_postgresql.ENUM` type now behaves more strictly with regards to CREATE/DROP. A metadata-level ENUM type will only be created and dropped at the metadata level, not the table level, with the exception of @@ -1366,10 +1380,10 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): native_enum = True def __init__(self, *enums, **kw): - """Construct an :class:`~.postgresql.ENUM`. + """Construct an :class:`_postgresql.ENUM`. Arguments are the same as that of - :class:`.types.Enum`, but also including + :class:`_types.Enum`, but also including the following parameters. :param create_type: Defaults to True. @@ -1397,7 +1411,7 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): @classmethod def adapt_emulated_to_native(cls, impl, **kw): - """Produce a PostgreSQL native :class:`.postgresql.ENUM` from plain + """Produce a PostgreSQL native :class:`_postgresql.ENUM` from plain :class:`.Enum`. """ @@ -1412,13 +1426,13 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): def create(self, bind=None, checkfirst=True): """Emit ``CREATE TYPE`` for this - :class:`~.postgresql.ENUM`. + :class:`_postgresql.ENUM`. If the underlying dialect does not support PostgreSQL CREATE TYPE, no action is taken. - :param bind: a connectable :class:`.Engine`, - :class:`.Connection`, or similar object to emit + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit SQL. :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see @@ -1436,13 +1450,13 @@ class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): def drop(self, bind=None, checkfirst=True): """Emit ``DROP TYPE`` for this - :class:`~.postgresql.ENUM`. + :class:`_postgresql.ENUM`. If the underlying dialect does not support PostgreSQL DROP TYPE, no action is taken. 
- :param bind: a connectable :class:`.Engine`, - :class:`.Connection`, or similar object to emit + :param bind: a connectable :class:`_engine.Engine`, + :class:`_engine.Connection`, or similar object to emit SQL. :param checkfirst: if ``True``, a query against the PG catalog will be first performed to see @@ -2276,7 +2290,8 @@ class PGInspector(reflection.Inspector): def get_foreign_table_names(self, schema=None): """Return a list of FOREIGN TABLE names. - Behavior is similar to that of :meth:`.Inspector.get_table_names`, + Behavior is similar to that of + :meth:`_reflection.Inspector.get_table_names`, except that the list is limited to those tables that report a ``relkind`` value of ``f``. diff --git a/lib/sqlalchemy/dialects/postgresql/dml.py b/lib/sqlalchemy/dialects/postgresql/dml.py index 626f81018..70d26a94b 100644 --- a/lib/sqlalchemy/dialects/postgresql/dml.py +++ b/lib/sqlalchemy/dialects/postgresql/dml.py @@ -23,7 +23,7 @@ class Insert(StandardInsert): Adds methods for PG-specific syntaxes such as ON CONFLICT. - The :class:`.postgresql.Insert` object is created using the + The :class:`_postgresql.Insert` object is created using the :func:`sqlalchemy.dialects.postgresql.insert` function. .. versionadded:: 1.1 @@ -41,7 +41,7 @@ class Insert(StandardInsert): .. seealso:: :ref:`postgresql_insert_on_conflict` - example of how - to use :attr:`.Insert.excluded` + to use :attr:`_expression.Insert.excluded` """ return alias(self.table, name="excluded").columns @@ -66,7 +66,7 @@ class Insert(StandardInsert): or the constraint object itself if it has a .name attribute. :param index_elements: - A sequence consisting of string column names, :class:`.Column` + A sequence consisting of string column names, :class:`_schema.Column` objects, or other column expression objects that will be used to infer a target index. @@ -78,12 +78,13 @@ class Insert(StandardInsert): Required argument. 
A dictionary or other mapping object with column names as keys and expressions or literals as values, specifying the ``SET`` actions to take. - If the target :class:`.Column` specifies a ".key" attribute distinct + If the target :class:`_schema.Column` specifies a ". + key" attribute distinct from the column name, that key should be used. .. warning:: This dictionary does **not** take into account Python-specified default UPDATE values or generation functions, - e.g. those specified using :paramref:`.Column.onupdate`. + e.g. those specified using :paramref:`_schema.Column.onupdate`. These values will not be exercised for an ON CONFLICT style of UPDATE, unless they are manually specified in the :paramref:`.Insert.on_conflict_do_update.set_` dictionary. @@ -122,7 +123,7 @@ class Insert(StandardInsert): or the constraint object itself if it has a .name attribute. :param index_elements: - A sequence consisting of string column names, :class:`.Column` + A sequence consisting of string column names, :class:`_schema.Column` objects, or other column expression objects that will be used to infer a target index. diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index f11919b4b..e64920719 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -46,7 +46,7 @@ class aggregate_order_by(expression.ColumnElement): .. 
seealso:: - :class:`.array_agg` + :class:`_functions.array_agg` """ @@ -113,7 +113,8 @@ class ExcludeConstraint(ColumnCollectionConstraint): where=(Column('group') != 'some group') ) - The constraint is normally embedded into the :class:`.Table` construct + The constraint is normally embedded into the :class:`_schema.Table` + construct directly, or added later using :meth:`.append_constraint`:: some_table = Table( @@ -136,11 +137,14 @@ class ExcludeConstraint(ColumnCollectionConstraint): A sequence of two tuples of the form ``(column, operator)`` where "column" is a SQL expression element or a raw SQL string, most - typically a :class:`.Column` object, and "operator" is a string + typically a :class:`_schema.Column` object, + and "operator" is a string containing the operator to use. In order to specify a column name - when a :class:`.Column` object is not available, while ensuring + when a :class:`_schema.Column` object is not available, + while ensuring that any necessary quoting rules take effect, an ad-hoc - :class:`.Column` or :func:`.sql.expression.column` object should be + :class:`_schema.Column` or :func:`_expression.column` + object should be used. :param name: @@ -230,9 +234,9 @@ class ExcludeConstraint(ColumnCollectionConstraint): def array_agg(*arg, **kw): - """PostgreSQL-specific form of :class:`.array_agg`, ensures - return type is :class:`.postgresql.ARRAY` and not - the plain :class:`.types.ARRAY`, unless an explicit ``type_`` + """PostgreSQL-specific form of :class:`_functions.array_agg`, ensures + return type is :class:`_postgresql.ARRAY` and not + the plain :class:`_types.ARRAY`, unless an explicit ``type_`` is passed. .. 
versionadded:: 1.1 diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index 7f90ffa0e..679805183 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -141,7 +141,7 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): """Construct a new :class:`.HSTORE`. :param text_type: the type that should be used for indexed values. - Defaults to :class:`.types.Text`. + Defaults to :class:`_types.Text`. .. versionadded:: 1.1.0 diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 9661634c2..811159953 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -102,14 +102,14 @@ colspecs[sqltypes.JSON.JSONPathType] = JSONPathType class JSON(sqltypes.JSON): """Represent the PostgreSQL JSON type. - This type is a specialization of the Core-level :class:`.types.JSON` - type. Be sure to read the documentation for :class:`.types.JSON` for + This type is a specialization of the Core-level :class:`_types.JSON` + type. Be sure to read the documentation for :class:`_types.JSON` for important tips regarding treatment of NULL values and ORM use. - .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a PostgreSQL- - specific specialization of the new :class:`.types.JSON` type. + .. versionchanged:: 1.1 :class:`_postgresql.JSON` is now a PostgreSQL- + specific specialization of the new :class:`_types.JSON` type. - The operators provided by the PostgreSQL version of :class:`.JSON` + The operators provided by the PostgreSQL version of :class:`_types.JSON` include: * Index operations (the ``->`` operator):: @@ -142,13 +142,15 @@ class JSON(sqltypes.JSON): data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == 'some value' - .. versionchanged:: 1.1 The :meth:`.ColumnElement.cast` operator on + .. 
versionchanged:: 1.1 The :meth:`_expression.ColumnElement.cast` + operator on JSON objects now requires that the :attr:`.JSON.Comparator.astext` modifier be called explicitly, if the cast works only from a textual string. Index operations return an expression object whose type defaults to - :class:`.JSON` by default, so that further JSON-oriented instructions + :class:`_types.JSON` by default, + so that further JSON-oriented instructions may be called upon the result type. Custom serializers and deserializers are specified at the dialect level, @@ -166,16 +168,16 @@ class JSON(sqltypes.JSON): .. seealso:: - :class:`.types.JSON` - Core level JSON type + :class:`_types.JSON` - Core level JSON type - :class:`.JSONB` + :class:`_postgresql.JSONB` """ # noqa astext_type = sqltypes.Text() def __init__(self, none_as_null=False, astext_type=None): - """Construct a :class:`.JSON` type. + """Construct a :class:`_types.JSON` type. :param none_as_null: if True, persist the value ``None`` as a SQL NULL value, not the JSON encoding of ``null``. Note that @@ -190,11 +192,11 @@ class JSON(sqltypes.JSON): .. seealso:: - :attr:`.JSON.NULL` + :attr:`_types.JSON.NULL` :param astext_type: the type to use for the :attr:`.JSON.Comparator.astext` - accessor on indexed attributes. Defaults to :class:`.types.Text`. + accessor on indexed attributes. Defaults to :class:`_types.Text`. .. versionadded:: 1.1 @@ -204,7 +206,7 @@ class JSON(sqltypes.JSON): self.astext_type = astext_type class Comparator(sqltypes.JSON.Comparator): - """Define comparison operations for :class:`.JSON`.""" + """Define comparison operations for :class:`_types.JSON`.""" @property def astext(self): @@ -217,7 +219,7 @@ class JSON(sqltypes.JSON): .. seealso:: - :meth:`.ColumnElement.cast` + :meth:`_expression.ColumnElement.cast` """ if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType): @@ -241,7 +243,8 @@ ischema_names["json"] = JSON class JSONB(JSON): """Represent the PostgreSQL JSONB type. 
- The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.:: + The :class:`_postgresql.JSONB` type stores arbitrary JSONB format data, e. + g.:: data_table = Table('data_table', metadata, Column('id', Integer, primary_key=True), @@ -254,19 +257,22 @@ class JSONB(JSON): data = {"key1": "value1", "key2": "value2"} ) - The :class:`.JSONB` type includes all operations provided by - :class:`.JSON`, including the same behaviors for indexing operations. + The :class:`_postgresql.JSONB` type includes all operations provided by + :class:`_types.JSON`, including the same behaviors for indexing operations + . It also adds additional operators specific to JSONB, including :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`, :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`, and :meth:`.JSONB.Comparator.contained_by`. - Like the :class:`.JSON` type, the :class:`.JSONB` type does not detect + Like the :class:`_types.JSON` type, the :class:`_postgresql.JSONB` + type does not detect in-place changes when used with the ORM, unless the :mod:`sqlalchemy.ext.mutable` extension is used. Custom serializers and deserializers - are shared with the :class:`.JSON` class, using the ``json_serializer`` + are shared with the :class:`_types.JSON` class, + using the ``json_serializer`` and ``json_deserializer`` keyword arguments. These must be specified at the dialect level using :func:`.create_engine`. When using psycopg2, the serializers are associated with the jsonb type using @@ -278,14 +284,14 @@ class JSONB(JSON): .. seealso:: - :class:`.JSON` + :class:`_types.JSON` """ __visit_name__ = "JSONB" class Comparator(JSON.Comparator): - """Define comparison operations for :class:`.JSON`.""" + """Define comparison operations for :class:`_types.JSON`.""" def has_key(self, other): """Boolean expression. Test for presence of a key. 
Note that the diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 89a63fd47..6d2672bbe 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -119,18 +119,21 @@ Per-Statement/Connection Execution Options ------------------------------------------- The following DBAPI-specific options are respected when used with -:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, -:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: +:meth:`_engine.Connection.execution_options`, +:meth:`.Executable.execution_options`, +:meth:`_query.Query.execution_options`, +in addition to those not specific to DBAPIs: * ``isolation_level`` - Set the transaction isolation level for the lifespan - of a :class:`.Connection` (can only be set on a connection, not a statement + of a :class:`_engine.Connection` (can only be set on a connection, + not a statement or query). See :ref:`psycopg2_isolation_level`. * ``stream_results`` - Enable or disable usage of psycopg2 server side cursors - this feature makes use of "named" cursors in combination with special result handling methods so that result rows are not fully buffered. If ``None`` or not set, the ``server_side_cursors`` option of the - :class:`.Engine` is used. + :class:`_engine.Engine` is used. * ``max_row_buffer`` - when using ``stream_results``, an integer value that specifies the maximum number of rows to buffer at a time. This is @@ -153,7 +156,8 @@ Modern versions of psycopg2 include a feature known as have been shown in benchmarking to improve psycopg2's executemany() performance, primarily with INSERT statements, by multiple orders of magnitude. 
SQLAlchemy allows this extension to be used for all ``executemany()`` style -calls invoked by an :class:`.Engine` when used with :ref:`multiple parameter +calls invoked by an :class:`_engine.Engine` +when used with :ref:`multiple parameter sets <execute_multiple>`, which includes the use of this feature both by the Core as well as by the ORM for inserts of objects with non-autogenerated primary key values, by adding the ``executemany_mode`` flag to @@ -180,13 +184,15 @@ Possible options for ``executemany_mode`` include: semicolon. This is the same behavior as was provided by the ``use_batch_mode=True`` flag. -* ``'values'``- For Core :func:`~.sql.expression.insert` constructs only (including those +* ``'values'``- For Core :func:`_expression.insert` + constructs only (including those emitted by the ORM automatically), the ``psycopg2.extras.execute_values`` extension is used so that multiple parameter sets are grouped into a single INSERT statement and joined together with multiple VALUES expressions. This method requires that the string text of the VALUES clause inside the INSERT statement is manipulated, so is only supported with a compiled - :func:`~.sql.expression.insert` construct where the format is predictable. For all other + :func:`_expression.insert` construct where the format is predictable. + For all other constructs, including plain textual INSERT statements not rendered by the SQLAlchemy expression language compiler, the ``psycopg2.extras.execute_batch`` method is used. It is therefore important @@ -213,7 +219,8 @@ more appropriate:: .. seealso:: :ref:`execute_multiple` - General information on using the - :class:`.Connection` object to execute statements in such a way as to make + :class:`_engine.Connection` + object to execute statements in such a way as to make use of the DBAPI ``.executemany()`` method. .. 
versionchanged:: 1.3.7 - Added support for @@ -299,7 +306,8 @@ actually contain percent or parenthesis symbols; as SQLAlchemy in many cases generates bound parameter names based on the name of a column, the presence of these characters in a column name can lead to problems. -There are two solutions to the issue of a :class:`.schema.Column` that contains +There are two solutions to the issue of a :class:`_schema.Column` +that contains one of these characters in its name. One is to specify the :paramref:`.schema.Column.key` for columns that have such names:: @@ -312,10 +320,12 @@ Above, an INSERT statement such as ``measurement.insert()`` will use ``measurement.c.size_meters > 10`` will derive the bound parameter name from the ``size_meters`` key as well. -.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key` +.. versionchanged:: 1.0.0 - SQL expressions will use + :attr:`_schema.Column.key` as the source of naming when anonymous bound parameters are created in SQL expressions; previously, this behavior only applied to - :meth:`.Table.insert` and :meth:`.Table.update` parameter names. + :meth:`_schema.Table.insert` and :meth:`_schema.Table.update` + parameter names. The other solution is to use a positional format; psycopg2 allows use of the "format" paramstyle, which can be passed to @@ -352,7 +362,8 @@ As discussed in :ref:`postgresql_isolation_level`, all PostgreSQL dialects support setting of transaction isolation level both via the ``isolation_level`` parameter passed to :func:`.create_engine`, as well as the ``isolation_level`` argument used by -:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these +:meth:`_engine.Connection.execution_options`. When using the psycopg2 dialect +, these options make use of psycopg2's ``set_isolation_level()`` connection method, rather than emitting a PostgreSQL directive; this is because psycopg2's API-level setting is always emitted at the start of each transaction in any |
