Diffstat (limited to 'lib/sqlalchemy/dialects')
 lib/sqlalchemy/dialects/mysql/__init__.py      |   6
 lib/sqlalchemy/dialects/mysql/base.py          |  35
 lib/sqlalchemy/dialects/mysql/json.py          |  90
 lib/sqlalchemy/dialects/postgresql/array.py    |  42
 lib/sqlalchemy/dialects/postgresql/base.py     |  10
 lib/sqlalchemy/dialects/postgresql/ext.py      |   2
 lib/sqlalchemy/dialects/postgresql/hstore.py   |  17
 lib/sqlalchemy/dialects/postgresql/json.py     | 206
 lib/sqlalchemy/dialects/postgresql/psycopg2.py |   1
 9 files changed, 218 insertions(+), 191 deletions(-)
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py index c1f78bd1d..ca204fcd1 100644 --- a/lib/sqlalchemy/dialects/mysql/__init__.py +++ b/lib/sqlalchemy/dialects/mysql/__init__.py @@ -15,7 +15,7 @@ base.dialect = mysqldb.dialect from .base import \ BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \ DECIMAL, DOUBLE, ENUM, DECIMAL,\ - FLOAT, INTEGER, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, \ + FLOAT, INTEGER, INTEGER, JSON, LONGBLOB, LONGTEXT, MEDIUMBLOB, \ MEDIUMINT, MEDIUMTEXT, NCHAR, \ NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \ TINYBLOB, TINYINT, TINYTEXT,\ @@ -24,8 +24,8 @@ from .base import \ __all__ = ( 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE', 'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', - 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'NCHAR', - 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', + 'JSON', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', + 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', 'YEAR', 'dialect' ) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 66ef0d5e2..61c4a3fac 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -538,10 +538,11 @@ output:: import re import sys +import json from ... import schema as sa_schema from ... import exc, log, sql, util -from ...sql import compiler +from ...sql import compiler, elements from array import array as _array from ...engine import reflection @@ -559,6 +560,7 @@ from .types import BIGINT, BIT, CHAR, DECIMAL, DATETIME, \ from .types import _StringType, _IntegerType, _NumericType, \ _FloatType, _MatchType from .enumerated import ENUM, SET +from .json import JSON, JSONIndexType, JSONPathType RESERVED_WORDS = set( @@ -663,7 +665,11 @@ colspecs = { sqltypes.Float: FLOAT, sqltypes.Time: TIME, sqltypes.Enum: ENUM, - sqltypes.MatchType: _MatchType + sqltypes.MatchType: _MatchType, + sqltypes.JSON: JSON, + sqltypes.JSON.JSONIndexType: JSONIndexType, + sqltypes.JSON.JSONPathType: JSONPathType + } # Everything 3.23 through 5.1 excepting OpenGIS types. 
@@ -683,6 +689,7 @@ ischema_names = { 'float': FLOAT, 'int': INTEGER, 'integer': INTEGER, + 'json': JSON, 'longblob': LONGBLOB, 'longtext': LONGTEXT, 'mediumblob': MEDIUMBLOB, @@ -728,6 +735,16 @@ class MySQLCompiler(compiler.SQLCompiler): def visit_sysdate_func(self, fn, **kw): return "SYSDATE()" + def visit_json_getitem_op_binary(self, binary, operator, **kw): + return "JSON_EXTRACT(%s, %s)" % ( + self.process(binary.left), + self.process(binary.right)) + + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + return "JSON_EXTRACT(%s, %s)" % ( + self.process(binary.left), + self.process(binary.right)) + def visit_concat_op_binary(self, binary, operator, **kw): return "concat(%s, %s)" % (self.process(binary.left), self.process(binary.right)) @@ -760,6 +777,8 @@ class MySQLCompiler(compiler.SQLCompiler): return self.dialect.type_compiler.process(adapted) elif isinstance(type_, sqltypes._Binary): return 'BINARY' + elif isinstance(type_, sqltypes.JSON): + return "JSON" elif isinstance(type_, sqltypes.NUMERIC): return self.dialect.type_compiler.process( type_).replace('NUMERIC', 'DECIMAL') @@ -1275,6 +1294,9 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): def visit_VARBINARY(self, type_, **kw): return "VARBINARY(%d)" % type_.length + def visit_JSON(self, type_, **kw): + return "JSON" + def visit_large_binary(self, type_, **kw): return self.visit_BLOB(type_) @@ -1394,10 +1416,13 @@ class MySQLDialect(default.DefaultDialect): }) ] - def __init__(self, isolation_level=None, **kwargs): + def __init__(self, isolation_level=None, json_serializer=None, + json_deserializer=None, **kwargs): kwargs.pop('use_ansiquotes', None) # legacy default.DefaultDialect.__init__(self, **kwargs) self.isolation_level = isolation_level + self._json_serializer = json_serializer + self._json_deserializer = json_deserializer def on_connect(self): if self.isolation_level is not None: @@ -1564,6 +1589,10 @@ class MySQLDialect(default.DefaultDialect): default.DefaultDialect.initialize(self, connection) @property + def _is_mariadb(self): + return 'MariaDB' in self.server_version_info + + @property def _supports_cast(self): return self.server_version_info is None or \ self.server_version_info >= (4, 0, 2) diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py new file mode 100644 index 000000000..a30cdc841 --- /dev/null +++ b/lib/sqlalchemy/dialects/mysql/json.py @@ -0,0 +1,90 @@ +# mysql/json.py +# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors +# <see AUTHORS file> +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from __future__ import absolute_import + +import json + +from ...sql import elements +from ... import types as sqltypes +from ... import util + + +class JSON(sqltypes.JSON): + """MySQL JSON type. + + MySQL supports JSON as of version 5.7. Note that MariaDB does **not** + support JSON at the time of this writing. + + The :class:`.mysql.JSON` type supports persistence of JSON values + as well as the core index operations provided by :class:`.types.JSON` + datatype, by adapting the operations to render the ``JSON_EXTRACT`` + function at the database level. + + .. 
versionadded:: 1.1 + + """ + + @util.memoized_property + def _str_impl(self): + return sqltypes.String(convert_unicode=True) + + def bind_processor(self, dialect): + string_process = self._str_impl.bind_processor(dialect) + + json_serializer = dialect._json_serializer or json.dumps + + def process(value): + if value is self.NULL: + value = None + elif isinstance(value, elements.Null) or ( + value is None and self.none_as_null + ): + return None + + serialized = json_serializer(value) + if string_process: + serialized = string_process(serialized) + return serialized + + return process + + def result_processor(self, dialect, coltype): + string_process = self._str_impl.result_processor(dialect, coltype) + json_deserializer = dialect._json_deserializer or json.loads + + def process(value): + if value is None: + return None + if string_process: + value = string_process(value) + return json_deserializer(value) + return process + + +class JSONIndexType(sqltypes.JSON.JSONIndexType): + def bind_processor(self, dialect): + def process(value): + if isinstance(value, int): + return "$[%s]" % value + else: + return '$."%s"' % value + + return process + + +class JSONPathType(sqltypes.JSON.JSONPathType): + def bind_processor(self, dialect): + def process(value): + return "$%s" % ( + "".join([ + "[%s]" % elem if isinstance(elem, int) + else '."%s"' % elem for elem in value + ]) + ) + + return process diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py index b88f139de..f4316d318 100644 --- a/lib/sqlalchemy/dialects/postgresql/array.py +++ b/lib/sqlalchemy/dialects/postgresql/array.py @@ -84,12 +84,20 @@ class array(expression.Tuple): super(array, self).__init__(*clauses, **kw) self.type = ARRAY(self.type) - def _bind_param(self, operator, obj): - return array([ - expression.BindParameter(None, o, _compared_to_operator=operator, - _compared_to_type=self.type, unique=True) - for o in obj - ]) + def _bind_param(self, operator, obj, _assume_scalar=False, type_=None): + if _assume_scalar or operator is operators.getitem: + # if getitem->slice were called, Indexable produces + # a Slice object from that + assert isinstance(obj, int) + return expression.BindParameter( + None, obj, _compared_to_operator=operator, + type_=type_, + _compared_to_type=self.type, unique=True) + + else: + return array([ + self._bind_param(operator, o, _assume_scalar=True, type_=type_) + for o in obj]) def self_group(self, against=None): if (against in ( @@ -106,15 +114,15 @@ CONTAINED_BY = operators.custom_op("<@", precedence=5) OVERLAP = operators.custom_op("&&", precedence=5) -class ARRAY(SchemaEventTarget, sqltypes.Array): +class ARRAY(SchemaEventTarget, sqltypes.ARRAY): """Postgresql ARRAY type. .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now - a subclass of the core :class:`.Array` type. + a subclass of the core :class:`.types.ARRAY` type. The :class:`.postgresql.ARRAY` type is constructed in the same way - as the core :class:`.Array` type; a member type is required, and a + as the core :class:`.types.ARRAY` type; a member type is required, and a number of dimensions is recommended if the type is to be used for more than one dimension:: @@ -125,9 +133,9 @@ class ARRAY(SchemaEventTarget, sqltypes.Array): ) The :class:`.postgresql.ARRAY` type provides all operations defined on the - core :class:`.Array` type, including support for "dimensions", indexed - access, and simple matching such as :meth:`.Array.Comparator.any` - and :meth:`.Array.Comparator.all`. 
:class:`.postgresql.ARRAY` class also + core :class:`.types.ARRAY` type, including support for "dimensions", indexed + access, and simple matching such as :meth:`.types.ARRAY.Comparator.any` + and :meth:`.types.ARRAY.Comparator.all`. :class:`.postgresql.ARRAY` class also provides PostgreSQL-specific methods for containment operations, including :meth:`.postgresql.ARRAY.Comparator.contains` :meth:`.postgresql.ARRAY.Comparator.contained_by`, @@ -144,20 +152,20 @@ class ARRAY(SchemaEventTarget, sqltypes.Array): .. seealso:: - :class:`.types.Array` - base array type + :class:`.types.ARRAY` - base array type :class:`.postgresql.array` - produces a literal array value. """ - class Comparator(sqltypes.Array.Comparator): + class Comparator(sqltypes.ARRAY.Comparator): """Define comparison operations for :class:`.ARRAY`. Note that these operations are in addition to those provided - by the base :class:`.types.Array.Comparator` class, including - :meth:`.types.Array.Comparator.any` and - :meth:`.types.Array.Comparator.all`. + by the base :class:`.types.ARRAY.Comparator` class, including + :meth:`.types.ARRAY.Comparator.any` and + :meth:`.types.ARRAY.Comparator.all`. """ diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index e9001f79a..3f9fcb27f 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1056,6 +1056,16 @@ class PGCompiler(compiler.SQLCompiler): self.process(element.stop, **kw), ) + def visit_json_getitem_op_binary(self, binary, operator, **kw): + return self._generate_generic_binary( + binary, " -> ", **kw + ) + + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): + return self._generate_generic_binary( + binary, " #> ", **kw + ) + def visit_getitem_binary(self, binary, operator, **kw): return "%s[%s]" % ( self.process(binary.left, **kw), diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py index 1a443c2d7..66c7ed0e5 100644 --- a/lib/sqlalchemy/dialects/postgresql/ext.py +++ b/lib/sqlalchemy/dialects/postgresql/ext.py @@ -159,7 +159,7 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE def array_agg(*arg, **kw): """Postgresql-specific form of :class:`.array_agg`, ensures return type is :class:`.postgresql.ARRAY` and not - the plain :class:`.types.Array`. + the plain :class:`.types.ARRAY`. .. versionadded:: 1.1 diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index b7b0fc007..d2d20386a 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -12,34 +12,33 @@ from .array import ARRAY from ... import types as sqltypes from ...sql import functions as sqlfunc from ...sql import operators -from ...sql.operators import custom_op from ... 
import util __all__ = ('HSTORE', 'hstore') -INDEX = custom_op( - "->", precedence=5, natural_self_precedent=True +GETITEM = operators.custom_op( + "->", precedence=15, natural_self_precedent=True, ) HAS_KEY = operators.custom_op( - "?", precedence=5, natural_self_precedent=True + "?", precedence=15, natural_self_precedent=True ) HAS_ALL = operators.custom_op( - "?&", precedence=5, natural_self_precedent=True + "?&", precedence=15, natural_self_precedent=True ) HAS_ANY = operators.custom_op( - "?|", precedence=5, natural_self_precedent=True + "?|", precedence=15, natural_self_precedent=True ) CONTAINS = operators.custom_op( - "@>", precedence=5, natural_self_precedent=True + "@>", precedence=15, natural_self_precedent=True ) CONTAINED_BY = operators.custom_op( - "<@", precedence=5, natural_self_precedent=True + "<@", precedence=15, natural_self_precedent=True ) @@ -166,7 +165,7 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): CONTAINED_BY, other, result_type=sqltypes.Boolean) def _setup_getitem(self, index): - return INDEX, index, self.type.text_type + return GETITEM, index, self.type.text_type def defined(self, key): """Boolean expression. Test for presence of a non-NULL value for diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py index 8a50270f5..6ff9fd88e 100644 --- a/lib/sqlalchemy/dialects/postgresql/json.py +++ b/lib/sqlalchemy/dialects/postgresql/json.py @@ -6,10 +6,10 @@ # the MIT License: http://www.opensource.org/licenses/mit-license.php from __future__ import absolute_import -import collections import json +import collections -from .base import ischema_names +from .base import ischema_names, colspecs from ... import types as sqltypes from ...sql import operators from ...sql import elements @@ -17,70 +17,68 @@ from ... import util __all__ = ('JSON', 'JSONB') - -# json : returns json -INDEX = operators.custom_op( - "->", precedence=5, natural_self_precedent=True -) - -# path operator: returns json -PATHIDX = operators.custom_op( - "#>", precedence=5, natural_self_precedent=True -) - -# json + astext: returns text ASTEXT = operators.custom_op( - "->>", precedence=5, natural_self_precedent=True + "->>", precedence=15, natural_self_precedent=True, ) -# path operator + astext: returns text -ASTEXT_PATHIDX = operators.custom_op( - "#>>", precedence=5, natural_self_precedent=True +JSONPATH_ASTEXT = operators.custom_op( + "#>>", precedence=15, natural_self_precedent=True, ) + HAS_KEY = operators.custom_op( - "?", precedence=5, natural_self_precedent=True + "?", precedence=15, natural_self_precedent=True ) HAS_ALL = operators.custom_op( - "?&", precedence=5, natural_self_precedent=True + "?&", precedence=15, natural_self_precedent=True ) HAS_ANY = operators.custom_op( - "?|", precedence=5, natural_self_precedent=True + "?|", precedence=15, natural_self_precedent=True ) CONTAINS = operators.custom_op( - "@>", precedence=5, natural_self_precedent=True + "@>", precedence=15, natural_self_precedent=True ) CONTAINED_BY = operators.custom_op( - "<@", precedence=5, natural_self_precedent=True + "<@", precedence=15, natural_self_precedent=True ) -class JSON(sqltypes.Indexable, sqltypes.TypeEngine): - """Represent the Postgresql JSON type. 
+class JSONPathType(sqltypes.JSON.JSONPathType): + def bind_processor(self, dialect): + def process(value): + assert isinstance(value, collections.Sequence) + tokens = [util.text_type(elem) for elem in value] + return "{%s}" % (", ".join(tokens)) - The :class:`.JSON` type stores arbitrary JSON format data, e.g.:: + return process - data_table = Table('data_table', metadata, - Column('id', Integer, primary_key=True), - Column('data', JSON) - ) +colspecs[sqltypes.JSON.JSONPathType] = JSONPathType - with engine.connect() as conn: - conn.execute( - data_table.insert(), - data = {"key1": "value1", "key2": "value2"} - ) - :class:`.JSON` provides several operations: +class JSON(sqltypes.JSON): + """Represent the Postgresql JSON type. + + This type is a specialization of the Core-level :class:`.types.JSON` + type. Be sure to read the documentation for :class:`.types.JSON` for + important tips regarding treatment of NULL values and ORM use. + + .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a Postgresql- + specific specialization of the new :class:`.types.JSON` type. + + The operators provided by the Postgresql version of :class:`.JSON` + include: * Index operations (the ``->`` operator):: data_table.c.data['some key'] + data_table.c.data[5] + + * Index operations returning text (the ``->>`` operator):: data_table.c.data['some key'].astext == 'some value' @@ -92,11 +90,11 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine): * Path index operations (the ``#>`` operator):: - data_table.c.data[('key_1', 'key_2', ..., 'key_n')] + data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')] * Path index operations returning text (the ``#>>`` operator):: - data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \ + data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == \ 'some value' .. versionchanged:: 1.1 The :meth:`.ColumnElement.cast` operator on @@ -108,36 +106,6 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine): :class:`.JSON` by default, so that further JSON-oriented instructions may be called upon the result type. - The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not - detect in-place mutations to the structure. In order to detect these, the - :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will - allow "in-place" changes to the datastructure to produce events which - will be detected by the unit of work. See the example at :class:`.HSTORE` - for a simple example involving a dictionary. - - When working with NULL values, the :class:`.JSON` type recommends the - use of two specific constants in order to differentiate between a column - that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string - of ``"null"``. To insert or select against a value that is SQL NULL, - use the constant :func:`.null`:: - - conn.execute(table.insert(), json_value=null()) - - To insert or select against a value that is JSON ``"null"``, use the - constant :attr:`.JSON.NULL`:: - - conn.execute(table.insert(), json_value=JSON.NULL) - - The :class:`.JSON` type supports a flag - :paramref:`.JSON.none_as_null` which when set to True will result - in the Python constant ``None`` evaluating to the value of SQL - NULL, and when set to False results in the Python constant - ``None`` evaluating to the value of JSON ``"null"``. 
The Python - value ``None`` may be used in conjunction with either - :attr:`.JSON.NULL` and :func:`.null` in order to indicate NULL - values, but care must be taken as to the value of the - :paramref:`.JSON.none_as_null` in these cases. - Custom serializers and deserializers are specified at the dialect level, that is using :func:`.create_engine`. The reason for this is that when using psycopg2, the DBAPI only allows serializers at the per-cursor @@ -151,43 +119,16 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine): When using the psycopg2 dialect, the json_deserializer is registered against the database using ``psycopg2.extras.register_default_json``. - .. versionadded:: 0.9 - .. seealso:: + :class:`.types.JSON` - Core level JSON type + :class:`.JSONB` """ - __visit_name__ = 'JSON' - - hashable = False astext_type = sqltypes.Text() - NULL = util.symbol('JSON_NULL') - """Describe the json value of NULL. - - This value is used to force the JSON value of ``"null"`` to be - used as the value. A value of Python ``None`` will be recognized - either as SQL NULL or JSON ``"null"``, based on the setting - of the :paramref:`.JSON.none_as_null` flag; the :attr:`.JSON.NULL` - constant can be used to always resolve to JSON ``"null"`` regardless - of this setting. This is in contrast to the :func:`.sql.null` construct, - which always resolves to SQL NULL. E.g.:: - - from sqlalchemy import null - from sqlalchemy.dialects.postgresql import JSON - - obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL - obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null" - - session.add_all([obj1, obj2]) - session.commit() - - .. versionadded:: 1.1 - - """ - def __init__(self, none_as_null=False, astext_type=None): """Construct a :class:`.JSON` type. @@ -210,15 +151,14 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine): :attr:`.JSON.Comparator.astext` accessor on indexed attributes. Defaults to :class:`.types.Text`. - .. versionadded:: 1.1.0 + .. 
versionadded:: 1.1 """ - self.none_as_null = none_as_null + super(JSON, self).__init__(none_as_null=none_as_null) if astext_type is not None: self.astext_type = astext_type - class Comparator( - sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator): + class Comparator(sqltypes.JSON.Comparator): """Define comparison operations for :class:`.JSON`.""" @property @@ -235,69 +175,19 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine): :meth:`.ColumnElement.cast` """ - against = self.expr.operator - if against is PATHIDX: - against = ASTEXT_PATHIDX - else: - against = ASTEXT - return self.expr.left.operate( - against, self.expr.right, result_type=self.type.astext_type) - - def _setup_getitem(self, index): - if not isinstance(index, util.string_types): - assert isinstance(index, collections.Sequence) - tokens = [util.text_type(elem) for elem in index] - index = "{%s}" % (", ".join(tokens)) - operator = PATHIDX + if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType): + return self.expr.left.operate( + JSONPATH_ASTEXT, + self.expr.right, result_type=self.type.astext_type) else: - operator = INDEX - - return operator, index, self.type + return self.expr.left.operate( + ASTEXT, self.expr.right, result_type=self.type.astext_type) comparator_factory = Comparator - @property - def should_evaluate_none(self): - return not self.none_as_null - - def bind_processor(self, dialect): - json_serializer = dialect._json_serializer or json.dumps - if util.py2k: - encoding = dialect.encoding - else: - encoding = None - - def process(value): - if value is self.NULL: - value = None - elif isinstance(value, elements.Null) or ( - value is None and self.none_as_null - ): - return None - if encoding: - return json_serializer(value).encode(encoding) - else: - return json_serializer(value) - - return process - - def result_processor(self, dialect, coltype): - json_deserializer = dialect._json_deserializer or json.loads - if util.py2k: - encoding = dialect.encoding - else: - encoding = None - - def process(value): - if value is None: - return None - if encoding: - value = value.decode(encoding) - return json_deserializer(value) - return process - +colspecs[sqltypes.JSON] = JSON ischema_names['json'] = JSON diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index d33554922..82fcc9054 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -534,6 +534,7 @@ class PGDialect_psycopg2(PGDialect): sqltypes.Enum: _PGEnum, # needs force_unicode HSTORE: _PGHStore, JSON: _PGJSON, + sqltypes.JSON: _PGJSON, JSONB: _PGJSONB, UUID: _PGUUID } |
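
The new ``mysql/json.py`` module above adapts the core JSON index operations to ``JSON_EXTRACT``. A minimal usage sketch follows, assuming MySQL 5.7 or later; the ``data_table`` / ``data`` names are invented for illustration and are not part of the changeset::

    # Sketch only: exercises the new mysql.JSON type from this diff.
    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects import mysql

    metadata = MetaData()
    data_table = Table(
        'data_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', mysql.JSON),   # renders as the MySQL 5.7 JSON column type
    )

    # Plain index and tuple ("path") access both compile to JSON_EXTRACT(),
    # via visit_json_getitem_op_binary / visit_json_path_getitem_op_binary:
    print(data_table.c.data['some key'].compile(dialect=mysql.dialect()))
    print(data_table.c.data[('key_1', 2, 'key_2')].compile(dialect=mysql.dialect()))

    # At execution time JSONIndexType / JSONPathType render the bound parameter
    # as a JSON path string, e.g. '$."some key"' or '$."key_1"[2]."key_2"'.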
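The new ``json_serializer`` / ``json_deserializer`` keyword arguments on ``MySQLDialect.__init__`` are accepted through :func:`.create_engine`, mirroring the existing Postgresql flags. A hedged sketch; the URL is a placeholder and the example assumes the MySQLdb driver is installed::

    import json
    from sqlalchemy import create_engine

    # Override the stdlib json.dumps / json.loads defaults used by the
    # mysql.JSON bind/result processors.
    engine = create_engine(
        "mysql+mysqldb://scott:tiger@localhost/test",
        json_serializer=lambda obj: json.dumps(obj, sort_keys=True),
        json_deserializer=json.loads,
    )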
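On the Postgresql side, the reworked ``Comparator.astext`` now chooses ``->>`` versus ``#>>`` based on whether the right-hand bind is typed as ``JSON.JSONPathType``. A minimal sketch, again with invented table and column names::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    pg_table = Table(
        'pg_data', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', postgresql.JSON),
    )
    pg = postgresql.dialect()

    # "->"  : plain index, via the new visit_json_getitem_op_binary
    print(pg_table.c.data['some key'].compile(dialect=pg))

    # "->>" : .astext on a plain index uses the ASTEXT operator
    print((pg_table.c.data['some key'].astext == 'some value').compile(dialect=pg))

    # "#>>" : .astext on a tuple (path) index switches to JSONPATH_ASTEXT,
    #         since the right-hand bind is typed as JSON.JSONPathType
    print((pg_table.c.data[('key_1', 'key_2')].astext == 'some value').compile(dialect=pg))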
