66 files changed, 2461 insertions, 1764 deletions
diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml
new file mode 100644
index 000000000..02bf73b43
--- /dev/null
+++ b/.github/workflows/create-wheels.yaml
@@ -0,0 +1,284 @@
+name: Create wheel
+
+on:
+  # run when a release has been created
+  release:
+    types: [created]
+
+env:
+  # set this so the sqlalchemy test uses the installed version and not the local one
+  PYTHONNOUSERSITE: 1
+  # comment TWINE_REPOSITORY_URL to use the real pypi. NOTE: change also the secret used in TWINE_PASSWORD
+  TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/
+
+jobs:
+  # two jobs are defined: make-wheel-win-osx and make-wheel-linux.
+  # they do the same steps, but linux wheels need to be built to target manylinux
+  make-wheel-win-osx:
+    name: ${{ matrix.python-version }}-${{ matrix.architecture }}-${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os:
+          - "windows-latest"
+          - "macos-latest"
+        python-version:
+          - "2.7"
+          - "3.5"
+          - "3.6"
+          - "3.7"
+          - "3.8"
+        architecture:
+          - x64
+          - x86
+
+        include:
+          - python-version: "2.7"
+            extra-requires: "mock"
+
+        exclude:
+          - os: "macos-latest"
+            architecture: x86
+
+      fail-fast: false
+
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v2
+
+      - name: Set up Python
+        uses: actions/setup-python@v1
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: ${{ matrix.architecture }}
+
+      - name: Remove tag_build from setup.cfg
+        # sqlalchemy has `tag_build` set to `dev` in setup.cfg. We need to remove it before creating the wheel
+        # otherwise it gets tagged with `dev0`
+        shell: pwsh
+        # This is equivalent to the sed commands:
+        # `sed -i '/tag_build=dev/d' setup.cfg`
+        # `sed -i '/tag_build = dev/d' setup.cfg`
+
+        # `-replace` uses a regexp match
+        # alternative form: `(get-content setup.cfg) | foreach-object{$_ -replace "tag_build.=.dev",""} | set-content setup.cfg`
+        run: |
+          (cat setup.cfg) | %{$_ -replace "tag_build.?=.?dev",""} | set-content setup.cfg
+
+      - name: Create wheel
+        # create the wheel using --no-use-pep517 since locally we have pyproject
+        # this flag should be removed once sqlalchemy supports pep517
+        # `--no-deps` is used to only generate the wheel for the current library. Redundant in sqlalchemy since it has no dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip --version
+          pip install setuptools wheel
+          pip wheel -w dist --no-use-pep517 -v --no-deps .
+
+      - name: Install wheel
+        # install the created wheel without using the pypi index
+        run: |
+          pip install -f dist --no-index sqlalchemy
+
+      - name: Check c extensions
+        # on windows in python 2.7 and 3.5 the c extensions fail to build.
+        # for python 2.7 visual studio 9 is missing
+        # for python 3.5 the linker has an error "cannot run 'rc.exe'"
+        if: matrix.os != 'windows-latest' || ( matrix.python-version != '2.7' && matrix.python-version != '3.5' )
+        run: |
+          python -c 'from sqlalchemy import cprocessors, cresultproxy, cutils'
+
+      - name: Test created wheel
+        # the mock reconnect test seems to fail on the ci in windows
+        run: |
+          pip install pytest pytest-xdist ${{ matrix.extra-requires }}
+          pytest -n2 -q test -k 'not MockReconnectTest' --nomemory
+
+      - name: Get wheel name
+        id: wheel-name
+        shell: bash
+        # creates output from https://github.community/t5/GitHub-Actions/Using-the-output-of-run-inside-of-if-condition/td-p/33920
+        run: |
+          echo ::set-output name=wheel::`ls dist`
+
+      - name: Upload wheel to release
+        # upload the generated wheel to the github release
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          # this is a release, so the event is called release: https://help.github.com/en/actions/reference/events-that-trigger-workflows#release-event-release
+          # the release event has the structure of this response https://developer.github.com/v3/repos/releases/#create-a-release
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: dist/${{ steps.wheel-name.outputs.wheel }}
+          asset_name: ${{ steps.wheel-name.outputs.wheel }}
+          asset_content_type: application/zip # application/octet-stream
+
+      - name: Set up Python for twine
+        # twine on py2 is very old and is no longer updated, so we change to python 3.8 before upload
+        uses: actions/setup-python@v1
+        with:
+          python-version: "3.8"
+
+      - name: Publish wheel
+        # the action https://github.com/marketplace/actions/pypi-publish runs only on linux and we cannot specify
+        # additional options
+        env:
+          TWINE_USERNAME: __token__
+          # replace TWINE_PASSWORD with token for real pypi
+          TWINE_PASSWORD: ${{ secrets.test_pypi_token }}
+        run: |
+          pip install -U twine
+          twine upload --skip-existing dist/*
+
+  make-wheel-linux:
+    name: ${{ matrix.python-version }}-${{ matrix.architecture }}-${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os:
+          - "ubuntu-latest"
+        python-version:
+          # the versions are <python tag>-<abi tag> as specified in PEP 425.
+          - cp27-cp27m
+          - cp27-cp27mu
+          - cp35-cp35m
+          - cp36-cp36m
+          - cp37-cp37m
+          - cp38-cp38
+        architecture:
+          - x64
+
+        include:
+          - python-version: "cp27-cp27m"
+            extra-requires: "mock"
+          - python-version: "cp27-cp27mu"
+            extra-requires: "mock"
+
+      fail-fast: false
+
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v2
+
+      - name: Get python version
+        id: linux-py-version
+        env:
+          py_tag: ${{ matrix.python-version }}
+        # the command `echo "::set-output ...` is used to create a step output that can be used in following steps
+        # this is from https://github.community/t5/GitHub-Actions/Using-the-output-of-run-inside-of-if-condition/td-p/33920
+        run: |
+          version="${py_tag: 2:1}.${py_tag: 3:1}"
+          echo $version
+          echo "::set-output name=python-version::$version"
+
+      - name: Set up Python
+        uses: actions/setup-python@v1
+        with:
+          python-version: ${{ steps.linux-py-version.outputs.python-version }}
+          architecture: ${{ matrix.architecture }}
+
+      - name: Remove tag_build from setup.cfg
+        # sqlalchemy has `tag_build` set to `dev` in setup.cfg. We need to remove it before creating the wheel
+        # otherwise it gets tagged with `dev0`
+        shell: pwsh
+        # This is equivalent to the sed commands:
+        # `sed -i '/tag_build=dev/d' setup.cfg`
+        # `sed -i '/tag_build = dev/d' setup.cfg`
+
+        # `-replace` uses a regexp match
+        # alternative form: `(get-content setup.cfg) | foreach-object{$_ -replace "tag_build.=.dev",""} | set-content setup.cfg`
+        run: |
+          (cat setup.cfg) | %{$_ -replace "tag_build.?=.?dev",""} | set-content setup.cfg
+
+      - name: Create wheel for manylinux
+        # this step uses the image provided by pypa here https://github.com/pypa/manylinux to generate the wheels on linux
+        # the action uses the image for manylinux2010 but can generate also a manylinux1 wheel
+        # change the tag of this image to change the image used
+        # NOTE: the output folder is "wheelhouse", not the classic "dist"
+        uses: RalfG/python-wheels-manylinux-build@v0.2.2-manylinux2010_x86_64
+        # this action generates 3 wheels in wheelhouse/: linux, manylinux1 and manylinux2010
+        with:
+          # python-versions is the output of the previous step and is in the form <python tag>-<abi tag>. Eg cp37-cp37mu
+          python-versions: ${{ matrix.python-version }}
+          build-requirements: "setuptools wheel"
+          # Create the wheel using --no-use-pep517 since locally we have pyproject
+          # This flag should be removed once sqlalchemy supports pep517
+          # `--no-deps` is used to only generate the wheel for the current library. Redundant in sqlalchemy since it has no dependencies
+          pip-wheel-args: "--no-use-pep517 -v --no-deps"
+
+      - name: Check created wheel
+        # check that the wheel is compatible with the current installation.
+        # If it is, the step:
+        # - installs the created wheel without using the pypi index
+        # - checks the c extensions
+        # - runs the tests
+        run: |
+          pip install -q wheel
+          version=`python -W ignore -c 'from wheel.pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag; print("{0}{1}-{2}".format(get_abbr_impl(), get_impl_ver(), get_abi_tag()))'`
+          echo Wheel tag ${{ matrix.python-version }}. Installed version $version.
+          if [[ "${{ matrix.python-version }}" = "$version" ]]
+          then
+            pip install -f wheelhouse --no-index sqlalchemy
+            python -c 'from sqlalchemy import cprocessors, cresultproxy, cutils'
+            pip install pytest pytest-xdist ${{ matrix.extra-requires }}
+            pytest -n2 -q test -k 'not MockReconnectTest' --nomemory
+          else
+            echo Not compatible. Skipping install.
+          fi
+
+      - name: Get wheel names
+        id: wheel-name
+        shell: bash
+        # the wheel creation step generates 3 wheels: linux, manylinux1 and manylinux2010
+        # Pypi accepts only the manylinux versions
+        run: |
+          cd wheelhouse
+          echo ::set-output name=wheel1::`ls *manylinux1*`
+          echo ::set-output name=wheel2010::`ls *manylinux2010*`
+
+      - name: Upload wheel manylinux1 to release
+        # upload the generated manylinux1 wheel to the github release. Only a single file per step can be uploaded at the moment
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          # this is a release, so the event is called release: https://help.github.com/en/actions/reference/events-that-trigger-workflows#release-event-release
+          # the release event has the structure of this response https://developer.github.com/v3/repos/releases/#create-a-release
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: wheelhouse/${{ steps.wheel-name.outputs.wheel1 }}
+          asset_name: ${{ steps.wheel-name.outputs.wheel1 }}
+          asset_content_type: application/zip # application/octet-stream
+
+      - name: Upload wheel manylinux2010 to release
+        # upload the generated manylinux2010 wheel to the github release. Only a single file per step can be uploaded at the moment
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          # this is a release, so the event is called release: https://help.github.com/en/actions/reference/events-that-trigger-workflows#release-event-release
+          # the release event has the structure of this response https://developer.github.com/v3/repos/releases/#create-a-release
+          upload_url: ${{ github.event.release.upload_url }}
+          asset_path: wheelhouse/${{ steps.wheel-name.outputs.wheel2010 }}
+          asset_name: ${{ steps.wheel-name.outputs.wheel2010 }}
+          asset_content_type: application/zip # application/octet-stream
+
+      - name: Set up Python for twine
+        # twine on py2 is very old and is no longer updated, so we change to python 3.8 before upload
+        uses: actions/setup-python@v1
+        with:
+          python-version: "3.8"
+
+      - name: Publish wheel
+        # the action https://github.com/marketplace/actions/pypi-publish runs only on linux and we cannot specify
+        # additional options
+        # We upload both manylinux1 and manylinux2010 wheels. pip will download the appropriate one according to the system.
+        # manylinux1 is an older format and is now not widely used since many environments can use manylinux2010
+        # currently (April 2020) manylinux2014 is still a work in progress, so we do not generate it.
+        env:
+          TWINE_USERNAME: __token__
+          # replace TWINE_PASSWORD with token for real pypi
+          TWINE_PASSWORD: ${{ secrets.test_pypi_token }}
+        run: |
+          pip install -U twine
+          twine upload --skip-existing wheelhouse/*manylinux*
diff --git a/doc/build/changelog/README.txt b/doc/build/changelog/README.txt
new file mode 100644
index 000000000..65ee529dd
--- /dev/null
+++ b/doc/build/changelog/README.txt
@@ -0,0 +1,34 @@
+Individual per-changelog files are placed into their corresponding release's
+directory (for example: changelog files for the `1.4` branch are placed in the
+`./unreleased_14/` directory, such as `./unreleased_14/4710.rst`).
+
+Files are in `.rst` format, which are pulled in by
+changelog (https://github.com/sqlalchemyorg/changelog; version 0.4.0 or higher)
+to be rendered into their corresponding `changelog_xx.rst` file
+(for example: `./changelog_14.rst`). At release time, the files in the
+`unreleased_xx/` directory are removed and written directly into the changelog.
+
+The rationale is that multiple changes being merged into Gerrit don't produce
+conflicts. Note that Gerrit does not support alternate merge handlers, unlike
+git itself (and the alternate merge handlers don't work that well either).
+
+Each changelog file should be named `{ID}.rst`, wherein `ID` is the unique
+identifier of the issue in the Github issue tracker.
+
+In the example below, the `tags` and `tickets` fields contain comma-separated
+listings because there is more than one element in each.
+
+================================================================================
+ Example Below
+================================================================================
+
+
+.. change::
+    :tags: bug, sql, orm
+    :tickets: 4839, 3257
+
+    Please use reStructuredText and Sphinx markup when possible. For example,
+    method :meth:`.Index.create` and parameter
+    :paramref:`.Index.create.checkfirst`, and :class:`.Table`, which will be
+    subject to the relevant markup. Also please note the indentation required
+    for the text.
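The compatibility check in the "Check created wheel" step above packs several calls into one shell line. Unrolled into plain Python, the same logic looks roughly like the sketch below; it assumes a `wheel` release old enough to still ship `wheel.pep425tags` (newer `wheel` releases removed that module, and `packaging.tags` is the modern equivalent)::

    # A sketch of what the one-liner in "Check created wheel" computes.
    # Assumes wheel.pep425tags is importable (older wheel releases only).
    from wheel.pep425tags import get_abbr_impl, get_abi_tag, get_impl_ver

    def current_wheel_tag():
        # e.g. "cp37-cp37m" on a CPython 3.7 interpreter with the cp37m ABI
        return "{0}{1}-{2}".format(get_abbr_impl(), get_impl_ver(), get_abi_tag())

    # The workflow installs and tests a built wheel only when this value
    # matches the matrix entry (e.g. cp27-cp27mu), since the manylinux image
    # builds wheels for interpreters other than the one running the job.
    print(current_wheel_tag())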
diff --git a/doc/build/changelog/changelog_07.rst b/doc/build/changelog/changelog_07.rst index f921d2949..43ae2fe0d 100644 --- a/doc/build/changelog/changelog_07.rst +++ b/doc/build/changelog/changelog_07.rst @@ -2578,7 +2578,7 @@ The behavior of =/!= when comparing a scalar select to a value will no longer produce IN/NOT IN as of 0.8; - this behavior is a little too heavy handed (use in_() if + this behavior is a little too heavy handed (use ``in_()`` if you want to emit IN) and now emits a deprecation warning. To get the 0.8 behavior immediately and remove the warning, a compiler recipe is given at diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst index 90407b2b2..7e0994dfc 100644 --- a/doc/build/changelog/changelog_08.rst +++ b/doc/build/changelog/changelog_08.rst @@ -356,9 +356,9 @@ :tickets: 2957 :versions: 0.9.3 - Fixed bug where :meth:`.in_()` would go into an endless loop if - erroneously passed a column expression whose comparator included - the ``__getitem__()`` method, such as a column that uses the + Fixed bug where :meth:`.ColumnOperators.in_()` would go into an endless + loop if erroneously passed a column expression whose comparator + included the ``__getitem__()`` method, such as a column that uses the :class:`.postgresql.ARRAY` type. .. change:: diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 7f73fa9e4..195f0f498 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -1006,8 +1006,8 @@ :tickets: 3459 Added a :meth:`.ColumnElement.cast` method which performs the same - purpose as the standalone :func:`.cast` function. Pull request - courtesy Sebastian Bank. + purpose as the standalone :func:`.expression.cast` function. Pull + request courtesy Sebastian Bank. .. change:: :tags: bug, engine @@ -1181,7 +1181,7 @@ Repaired the :class:`.ExcludeConstraint` construct to support common features that other objects like :class:`.Index` now do, that the column expression may be specified as an arbitrary SQL - expression such as :obj:`.cast` or :obj:`.text`. + expression such as :obj:`.expression.cast` or :obj:`.expression.text`. .. change:: :tags: feature, postgresql @@ -1206,14 +1206,13 @@ :tags: bug, orm :tickets: 3448 - Fixed an unexpected-use regression whereby custom :class:`.Comparator` - objects that made use of the ``__clause_element__()`` method and - returned an object that was an ORM-mapped - :class:`.InstrumentedAttribute` and not explicitly a - :class:`.ColumnElement` would fail to be correctly - handled when passed as an expression to :meth:`.Session.query`. - The logic in 0.9 happened to succeed on this, so this use case is now - supported. + Fixed an unexpected-use regression whereby custom + :class:`.types.TypeEngine.Comparator` objects that made use of the + ``__clause_element__()`` method and returned an object that was an + ORM-mapped :class:`.InstrumentedAttribute` and not explicitly a + :class:`.ColumnElement` would fail to be correctly handled when passed + as an expression to :meth:`.Session.query`. The logic in 0.9 happened + to succeed on this, so this use case is now supported. .. change:: :tags: bug, sql diff --git a/doc/build/changelog/changelog_11.rst b/doc/build/changelog/changelog_11.rst index d4eef6811..c50322ad1 100644 --- a/doc/build/changelog/changelog_11.rst +++ b/doc/build/changelog/changelog_11.rst @@ -22,10 +22,6 @@ .. changelog:: - :version: 1.1.19 - :include_notes_from: unreleased_11 - -.. 
changelog:: :version: 1.1.18 :released: March 6, 2018 diff --git a/doc/build/changelog/changelog_12.rst b/doc/build/changelog/changelog_12.rst index 4d8d41bbb..e5c145642 100644 --- a/doc/build/changelog/changelog_12.rst +++ b/doc/build/changelog/changelog_12.rst @@ -13,10 +13,6 @@ .. changelog:: - :version: 1.2.20 - :include_notes_from: unreleased_12 - -.. changelog:: :version: 1.2.19 :released: April 15, 2019 @@ -882,15 +878,15 @@ :released: May 28, 2018 .. change:: - :tags: bug, orm - :tickets: 4256 + :tags: bug, orm + :tickets: 4256 - Fixed regression in 1.2.7 caused by :ticket:`4228`, which itself was fixing - a 1.2-level regression, where the ``query_cls`` callable passed to a - :class:`.Session` was assumed to be a subclass of :class:`.Query` with - class method availability, as opposed to an arbitrary callable. In - particular, the dogpile caching example illustrates ``query_cls`` as a - function and not a :class:`.Query` subclass. + Fixed regression in 1.2.7 caused by :ticket:`4228`, which itself was fixing + a 1.2-level regression, where the ``query_cls`` callable passed to a + :class:`.Session` was assumed to be a subclass of :class:`.Query` with + class method availability, as opposed to an arbitrary callable. In + particular, the dogpile caching example illustrates ``query_cls`` as a + function and not a :class:`.Query` subclass. .. change:: :tags: bug, engine @@ -928,15 +924,15 @@ .. change:: - :tags: bug, ext - :tickets: 4247 + :tags: bug, ext + :tickets: 4247 - The horizontal sharding extension now makes use of the identity token - added to ORM identity keys as part of :ticket:`4137`, when an object - refresh or column-based deferred load or unexpiration operation occurs. - Since we know the "shard" that the object originated from, we make - use of this value when refreshing, thereby avoiding queries against - other shards that don't match this object's identity in any case. + The horizontal sharding extension now makes use of the identity token + added to ORM identity keys as part of :ticket:`4137`, when an object + refresh or column-based deferred load or unexpiration operation occurs. + Since we know the "shard" that the object originated from, we make + use of this value when refreshing, thereby avoiding queries against + other shards that don't match this object's identity in any case. .. change:: :tags: bug, sql @@ -1009,15 +1005,15 @@ of these issues as part of issue :ticket:`4258`. .. change:: - :tags: bug, ext - :tickets: 4266 + :tags: bug, ext + :tickets: 4266 - Fixed a race condition which could occur if automap - :meth:`.AutomapBase.prepare` were used within a multi-threaded context - against other threads which may call :func:`.configure_mappers` as a - result of use of other mappers. The unfinished mapping work of automap - is particularly sensitive to being pulled in by a - :func:`.configure_mappers` step leading to errors. + Fixed a race condition which could occur if automap + :meth:`.AutomapBase.prepare` were used within a multi-threaded context + against other threads which may call :func:`.configure_mappers` as a + result of use of other mappers. The unfinished mapping work of automap + is particularly sensitive to being pulled in by a + :func:`.configure_mappers` step leading to errors. .. change:: :tags: bug, orm @@ -1029,12 +1025,12 @@ the post criteria feature is now used by the lazy loader. .. 
change:: - :tags: bug, tests - :tickets: 4249 + :tags: bug, tests + :tickets: 4249 - Fixed a bug in the test suite where if an external dialect returned - ``None`` for ``server_version_info``, the exclusion logic would raise an - ``AttributeError``. + Fixed a bug in the test suite where if an external dialect returned + ``None`` for ``server_version_info``, the exclusion logic would raise an + ``AttributeError``. .. change:: :tags: bug, orm @@ -1954,18 +1950,18 @@ index implicitly added by Oracle onto the primary key columns. .. change:: - :tags: bug, orm - :tickets: 4071 + :tags: bug, orm + :tickets: 4071 - Removed the warnings that are emitted when the LRU caches employed - by the mapper as well as loader strategies reach their threshold; the - purpose of this warning was at first a guard against excess cache keys - being generated but became basically a check on the "creating many - engines" antipattern. While this is still an antipattern, the presence - of test suites which both create an engine per test as well as raise - on all warnings will be an inconvenience; it should not be critical - that such test suites change their architecture just for this warning - (though engine-per-test suite is always better). + Removed the warnings that are emitted when the LRU caches employed + by the mapper as well as loader strategies reach their threshold; the + purpose of this warning was at first a guard against excess cache keys + being generated but became basically a check on the "creating many + engines" antipattern. While this is still an antipattern, the presence + of test suites which both create an engine per test as well as raise + on all warnings will be an inconvenience; it should not be critical + that such test suites change their architecture just for this warning + (though engine-per-test suite is always better). .. change:: :tags: bug, orm @@ -2093,11 +2089,12 @@ Internal refinements to the :class:`.Enum`, :class:`.Interval`, and :class:`.Boolean` types, which now extend a common mixin :class:`.Emulated` that indicates a type that provides Python-side - emulation of a DB native type, switching out to the DB native type when a - supporting backend is in use. The PostgreSQL :class:`.INTERVAL` type - when used directly will now include the correct type coercion rules for - SQL expressions that also take effect for :class:`.sqltypes.Interval` - (such as adding a date to an interval yields a datetime). + emulation of a DB native type, switching out to the DB native type when + a supporting backend is in use. The PostgreSQL + :class:`.postgresql.INTERVAL` type when used directly will now include + the correct type coercion rules for SQL expressions that also take + effect for :class:`.sqltypes.Interval` (such as adding a date to an + interval yields a datetime). .. change:: diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst index f8487d51d..b707a912c 100644 --- a/doc/build/changelog/migration_08.rst +++ b/doc/build/changelog/migration_08.rst @@ -413,7 +413,7 @@ and :meth:`.PropComparator.has`:: .. 
seealso:: - :ref:`of_type` + :ref:`inheritance_of_type` :ticket:`2438` :ticket:`1106` @@ -960,7 +960,7 @@ when features such as :meth:`.MetaData.create_all` and :func:`.cast` is used:: :ticket:`2276` "Prefixes" now supported for :func:`.update`, :func:`.delete` -------------------------------------------------------------- +---------------------------------------------------------------------------- Geared towards MySQL, a "prefix" can be rendered within any of these constructs. E.g.:: diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst index 376e83236..df4a2c57f 100644 --- a/doc/build/changelog/migration_09.rst +++ b/doc/build/changelog/migration_09.rst @@ -83,7 +83,7 @@ accessor:: .. _migration_2736: :meth:`.Query.select_from` no longer applies the clause to corresponding entities ---------------------------------------------------------------------------------- +----------------------------------------------------------------------------------------------- The :meth:`.Query.select_from` method has been popularized in recent versions as a means of controlling the first thing that a :class:`.Query` object @@ -564,8 +564,9 @@ by that of most database documentation:: -- 0.9 behavior x = :x_1 COLLATE en_EN -The potentially backwards incompatible change arises if the :meth:`.collate` -operator is being applied to the right-hand column, as follows:: +The potentially backwards incompatible change arises if the +:meth:`.ColumnOperators.collate` operator is being applied to the right-hand +column, as follows:: print(column('x') == literal('somevalue').collate("en_EN")) diff --git a/doc/build/changelog/migration_13.rst b/doc/build/changelog/migration_13.rst index b749f5623..6f99bb708 100644 --- a/doc/build/changelog/migration_13.rst +++ b/doc/build/changelog/migration_13.rst @@ -1321,13 +1321,12 @@ SQL text from being rendered directly. "threadlocal" engine strategy deprecated ----------------------------------------- -The :ref:`"threadlocal" engine strategy <threadlocal_strategy>` was added -around SQLAlchemy 0.2, as a solution to the problem that the standard way of -operating in SQLAlchemy 0.1, which can be summed up as "threadlocal -everything", was found to be lacking. In retrospect, it seems fairly absurd -that by SQLAlchemy's first releases which were in every regard "alpha", that -there was concern that too many users had already settled on the existing API -to simply change it. +The "threadlocal engine strategy" was added around SQLAlchemy 0.2, as a +solution to the problem that the standard way of operating in SQLAlchemy 0.1, +which can be summed up as "threadlocal everything", was found to be lacking. +In retrospect, it seems fairly absurd that by SQLAlchemy's first releases which +were in every regard "alpha", that there was concern that too many users had +already settled on the existing API to simply change it. The original usage model for SQLAlchemy looked like this:: diff --git a/doc/build/changelog/migration_14.rst b/doc/build/changelog/migration_14.rst index b35818075..a5a3f83d0 100644 --- a/doc/build/changelog/migration_14.rst +++ b/doc/build/changelog/migration_14.rst @@ -23,7 +23,7 @@ What's New in SQLAlchemy 1.4? Behavioral Changes - General ============================ -.. _change_change_deferred_construction: +.. 
_change_deferred_construction: Many Core and ORM statement objects now perform much of their validation in the compile phase diff --git a/doc/build/changelog/migration_20.rst b/doc/build/changelog/migration_20.rst index d9fc6c6c4..c712ab991 100644 --- a/doc/build/changelog/migration_20.rst +++ b/doc/build/changelog/migration_20.rst @@ -731,7 +731,7 @@ ORM Query Unified with Core Select Tenative overall, however there will almost definitely be architectural changes in :class:`.Query` that move it closer to - :meth:`.select`. + :func:`.select`. The ``session.query(<cls>)`` pattern itself will likely **not** be fully removed. As this pattern is extremely prevalent and numerous within any diff --git a/doc/build/changelog/unreleased_10/4065.rst b/doc/build/changelog/unreleased_10/4065.rst deleted file mode 100644 index 375ce8cf1..000000000 --- a/doc/build/changelog/unreleased_10/4065.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. change:: - :tags: bug, mysql - :tickets: 4065 - :versions: 1.2.0b3, 1.1.14 - - mysqlclient as of 1.3.11 changed the exception - class for a particular disconnect situation from - InterfaceError to InternalError; the disconnection - detection logic now accommodates this. diff --git a/doc/build/changelog/unreleased_10/README.txt b/doc/build/changelog/unreleased_10/README.txt deleted file mode 100644 index 86b91221f..000000000 --- a/doc/build/changelog/unreleased_10/README.txt +++ /dev/null @@ -1,11 +0,0 @@ -Individual per-changelog files go here -in .rst format, which are pulled in by -changelog (version 0.4.0 or higher) to -be rendered into the changelog_xx.rst file. -At release time, the files here are removed and written -directly into the changelog. - -Rationale is so that multiple changes being merged -into gerrit don't produce conflicts. Note that -gerrit does not support custom merge handlers unlike -git itself. diff --git a/doc/build/changelog/unreleased_11/README.txt b/doc/build/changelog/unreleased_11/README.txt deleted file mode 100644 index 1d2b3446e..000000000 --- a/doc/build/changelog/unreleased_11/README.txt +++ /dev/null @@ -1,12 +0,0 @@ -Individual per-changelog files go here -in .rst format, which are pulled in by -changelog (version 0.4.0 or higher) to -be rendered into the changelog_xx.rst file. -At release time, the files here are removed and written -directly into the changelog. - -Rationale is so that multiple changes being merged -into gerrit don't produce conflicts. Note that -gerrit does not support custom merge handlers unlike -git itself. - diff --git a/doc/build/changelog/unreleased_12/README.txt b/doc/build/changelog/unreleased_12/README.txt deleted file mode 100644 index 1d2b3446e..000000000 --- a/doc/build/changelog/unreleased_12/README.txt +++ /dev/null @@ -1,12 +0,0 @@ -Individual per-changelog files go here -in .rst format, which are pulled in by -changelog (version 0.4.0 or higher) to -be rendered into the changelog_xx.rst file. -At release time, the files here are removed and written -directly into the changelog. - -Rationale is so that multiple changes being merged -into gerrit don't produce conflicts. Note that -gerrit does not support custom merge handlers unlike -git itself. - diff --git a/doc/build/changelog/unreleased_13/4138.rst b/doc/build/changelog/unreleased_13/4138.rst new file mode 100644 index 000000000..fd8f0eb90 --- /dev/null +++ b/doc/build/changelog/unreleased_13/4138.rst @@ -0,0 +1,5 @@ +.. change:: + :tags: schema + :tickets: 4138 + + Add ``comment`` attribute to :class:`.Column` ``__repr__`` method. 
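The small changelog entry above is easiest to picture with a quick sketch; the repr rendering shown in the final comment is illustrative rather than verbatim output::

    from sqlalchemy import Column, Integer

    # With the change in 4138.rst, a Column's comment participates in its
    # repr() instead of being silently dropped.
    c = Column("qty", Integer, comment="number of items ordered")
    print(repr(c))
    # e.g. Column('qty', Integer(), table=None, comment='number of items ordered')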
diff --git a/doc/build/changelog/unreleased_13/README.txt b/doc/build/changelog/unreleased_13/README.txt deleted file mode 100644 index 1d2b3446e..000000000 --- a/doc/build/changelog/unreleased_13/README.txt +++ /dev/null @@ -1,12 +0,0 @@ -Individual per-changelog files go here -in .rst format, which are pulled in by -changelog (version 0.4.0 or higher) to -be rendered into the changelog_xx.rst file. -At release time, the files here are removed and written -directly into the changelog. - -Rationale is so that multiple changes being merged -into gerrit don't produce conflicts. Note that -gerrit does not support custom merge handlers unlike -git itself. - diff --git a/doc/build/changelog/unreleased_14/README.txt b/doc/build/changelog/unreleased_14/README.txt deleted file mode 100644 index 1d2b3446e..000000000 --- a/doc/build/changelog/unreleased_14/README.txt +++ /dev/null @@ -1,12 +0,0 @@ -Individual per-changelog files go here -in .rst format, which are pulled in by -changelog (version 0.4.0 or higher) to -be rendered into the changelog_xx.rst file. -At release time, the files here are removed and written -directly into the changelog. - -Rationale is so that multiple changes being merged -into gerrit don't produce conflicts. Note that -gerrit does not support custom merge handlers unlike -git itself. - diff --git a/doc/build/conf.py b/doc/build/conf.py index 805290a89..266b20e08 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -37,6 +37,7 @@ extensions = [ "changelog", "sphinx_paramlinks", ] +needs_extensions = {"zzzeeksphinx": "1.1.2"} # Add any paths that contain templates here, relative to this directory. # not sure why abspath() is needed here, some users @@ -88,6 +89,9 @@ changelog_render_changeset = "http://www.sqlalchemy.org/trac/changeset/%s" exclude_patterns = ["build", "**/unreleased*/*"] +# zzzeeksphinx makes these conversions when it is rendering the +# docstrings classes, methods, and functions within the scope of +# Sphinx autodoc autodocmods_convert_modname = { "sqlalchemy.sql.sqltypes": "sqlalchemy.types", "sqlalchemy.sql.type_api": "sqlalchemy.types", @@ -97,8 +101,10 @@ autodocmods_convert_modname = { "sqlalchemy.sql.dml": "sqlalchemy.sql.expression", "sqlalchemy.sql.ddl": "sqlalchemy.schema", "sqlalchemy.sql.base": "sqlalchemy.sql.expression", + "sqlalchemy.event.base": "sqlalchemy.event", "sqlalchemy.engine.base": "sqlalchemy.engine", "sqlalchemy.engine.result": "sqlalchemy.engine", + "sqlalchemy.util._collections": "sqlalchemy.util", } autodocmods_convert_modname_w_class = { @@ -106,6 +112,42 @@ autodocmods_convert_modname_w_class = { ("sqlalchemy.sql.base", "DialectKWArgs"): "sqlalchemy.sql.base", } +# on the referencing side, a newer zzzeeksphinx extension +# applies shorthand symbols to references so that we can have short +# names that are still using absolute references. 
+zzzeeksphinx_module_prefixes = { + "_sa": "sqlalchemy", + "_engine": "sqlalchemy.engine", + "_schema": "sqlalchemy.schema", + "_types": "sqlalchemy.types", + "_expression": "sqlalchemy.sql.expression", + "_functions": "sqlalchemy.sql.functions", + "_pool": "sqlalchemy.pool", + "_event": "sqlalchemy.event", + "_events": "sqlalchemy.events", + "_exc": "sqlalchemy.exc", + "_reflection": "sqlalchemy.engine.reflection", + "_orm": "sqlalchemy.orm", + "_query": "sqlalchemy.orm.query", + "_ormevent": "sqlalchemy.orm.event", + "_ormexc": "sqlalchemy.orm.exc", + "_baked": "sqlalchemy.ext.baked", + "_associationproxy": "sqlalchemy.ext.associationproxy", + "_automap": "sqlalchemy.ext.automap", + "_hybrid": "sqlalchemy.ext.hybrid", + "_compilerext": "sqlalchemy.ext.compiler", + "_mutable": "sqlalchemy.ext.mutable", + "_declarative": "sqlalchemy.ext.declarative", + "_future": "sqlalchemy.future", + "_futureorm": "sqlalchemy.future.orm", + "_postgresql": "sqlalchemy.dialects.postgresql", + "_mysql": "sqlalchemy.dialects.mysql", + "_mssql": "sqlalchemy.dialects.mssql", + "_oracle": "sqlalchemy.dialects.oracle", + "_sqlite": "sqlalchemy.dialects.sqlite", +} + + # The encoding of source files. # source_encoding = 'utf-8-sig' diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index 7c2793f34..2191dee6e 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -75,7 +75,7 @@ any transactional state or locks are removed, and the connection is ready for its next use. .. deprecated:: 2.0 The :class:`.ResultProxy` object is replaced in SQLAlchemy - 2.0 with a newly refined object known as :class:`.Result`. + 2.0 with a newly refined object known as :class:`.future.Result`. Our example above illustrated the execution of a textual SQL string, which should be invoked by using the :func:`.text` construct to indicate that diff --git a/doc/build/core/dml.rst b/doc/build/core/dml.rst index d83a52e7b..d116b67a5 100644 --- a/doc/build/core/dml.rst +++ b/doc/build/core/dml.rst @@ -16,19 +16,29 @@ constructs build on the intermediary :class:`.ValuesBase`. .. autoclass:: Delete :members: - :inherited-members: + + .. automethod:: Delete.where + + .. automethod:: Delete.returning .. autoclass:: Insert :members: - :inherited-members: + + .. automethod:: Insert.values + + .. automethod:: Insert.returning .. autoclass:: Update - :members: - :inherited-members: + :members: + + .. automethod:: Update.returning + + .. automethod:: Update.where + + .. automethod:: Update.values .. autoclass:: sqlalchemy.sql.expression.UpdateBase - :members: - :inherited-members: + :members: .. autoclass:: sqlalchemy.sql.expression.ValuesBase :members: diff --git a/doc/build/core/inspection.rst b/doc/build/core/inspection.rst index 313b4d6e7..01343102d 100644 --- a/doc/build/core/inspection.rst +++ b/doc/build/core/inspection.rst @@ -5,7 +5,9 @@ Runtime Inspection API ====================== .. automodule:: sqlalchemy.inspection - :members: + +.. autofunction:: sqlalchemy.inspect + Available Inspection Targets ---------------------------- diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst index 4771222e8..2f69c0200 100644 --- a/doc/build/core/selectable.rst +++ b/doc/build/core/selectable.rst @@ -78,13 +78,16 @@ elements are themselves :class:`.ColumnElement` subclasses). .. autoclass:: Select :members: - :inherited-members: + :inherited-members: ClauseElement + :exclude-members: memoized_attribute, memoized_instancemethod .. autoclass:: Selectable :members: .. 
autoclass:: SelectBase :members: + :inherited-members: ClauseElement + :exclude-members: memoized_attribute, memoized_instancemethod .. autoclass:: Subquery :members: diff --git a/doc/build/errors.rst b/doc/build/errors.rst index ed5583b12..43e0b9fa4 100644 --- a/doc/build/errors.rst +++ b/doc/build/errors.rst @@ -489,7 +489,7 @@ Above, the ``cprop`` attribute is used inline before it has been mapped, however this ``cprop`` attribute is not a :class:`.Column`, it's a :class:`.ColumnProperty`, which is an interim object and therefore does not have the full functionality of either the :class:`.Column` object -or the :class:`.InstrmentedAttribute` object that will be mapped onto the +or the :class:`.InstrumentedAttribute` object that will be mapped onto the ``Bar`` class once the declarative process is complete. While the :class:`.ColumnProperty` does have a ``__clause_element__()`` method, diff --git a/doc/build/faq/metadata_schema.rst b/doc/build/faq/metadata_schema.rst index 2d1527294..94cbb1787 100644 --- a/doc/build/faq/metadata_schema.rst +++ b/doc/build/faq/metadata_schema.rst @@ -50,7 +50,7 @@ Does SQLAlchemy support ALTER TABLE, CREATE VIEW, CREATE TRIGGER, Schema Upgrade General ALTER support isn't present in SQLAlchemy directly. For special DDL on an ad-hoc basis, the :class:`.DDL` and related constructs can be used. -See :doc:`core/ddl` for a discussion on this subject. +See :ref:`metadata_ddl_toplevel` for a discussion on this subject. A more comprehensive option is to use schema migration tools, such as Alembic or SQLAlchemy-Migrate; see :ref:`schema_migrations` for discussion on this. diff --git a/doc/build/faq/performance.rst b/doc/build/faq/performance.rst index c30e96abb..65d6cc460 100644 --- a/doc/build/faq/performance.rst +++ b/doc/build/faq/performance.rst @@ -212,7 +212,7 @@ the profiling output of this intentionally slow operation can be seen like this: that is, we see many expensive calls within the ``type_api`` system, and the actual time consuming thing is the ``time.sleep()`` call. -Make sure to check the :doc:`Dialect documentation <dialects/index>` +Make sure to check the :ref:`Dialect documentation <dialect_toplevel>` for notes on known performance tuning suggestions at this level, especially for databases like Oracle. There may be systems related to ensuring numeric accuracy or string processing that may not be needed in all cases. @@ -295,14 +295,14 @@ ORM as a first-class component. For the use case of fast bulk inserts, the SQL generation and execution system that the ORM builds on top of -is part of the :doc:`Core <core/tutorial>`. Using this system directly, we can produce an INSERT that +is part of the :ref:`Core <sqlexpression_toplevel>`. Using this system directly, we can produce an INSERT that is competitive with using the raw database API directly. .. note:: - When using the psycopg2 dialect, consider making use of the - :ref:`batch execution helpers <psycopg2_batch_mode>` feature of psycopg2, - now supported directly by the SQLAlchemy psycopg2 dialect. + When using the psycopg2 dialect, consider making use of the :ref:`batch + execution helpers <psycopg2_executemany_mode>` feature of psycopg2, now + supported directly by the SQLAlchemy psycopg2 dialect. 
Alternatively, the SQLAlchemy ORM offers the :ref:`bulk_operations` suite of methods, which provide hooks into subsections of the unit of diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst index c431fce3d..83bf0b43a 100644 --- a/doc/build/glossary.rst +++ b/doc/build/glossary.rst @@ -9,6 +9,25 @@ Glossary .. glossary:: :sorted: + relational + relational algebra + + An algebraic system developed by Edgar F. Codd that is used for + modelling and querying the data stored in relational databases. + + .. seealso:: + + `Relational Algebra (via Wikipedia) <https://en.wikipedia.org/wiki/Relational_algebra>`_ + + selectable + A term used in SQLAlchemy to describe a SQL construct that represents + a collection of rows. It's largely similar to the concept of a + "relation" in :term:`relational algebra`. In SQLAlchemy, objects + that subclass the :class:`.Selectable` class are considered to be + usable as "selectables" when using SQLAlchemy Core. The two most + common constructs are that of the :class:`.Table` and that of the + :class:`.Select` statement. + annotations Annotations are a concept used internally by SQLAlchemy in order to store additional information along with :class:`.ClauseElement` objects. A Python @@ -121,6 +140,79 @@ Glossary :term:`DDL` + metadata + table metadata + The term "metadata" generally refers to "data that describes data"; + data that itself represents the format and/or structure of some other + kind of data. In SQLAlchemy, the term "metadata" typically refers to + the :class:`.MetaData` construct, which is a collection of information + about the tables, columns, constraints, and other :term:`DDL` objects + that may exist in a particular database. + + .. seealso:: + + `Metadata Mapping (via Martin Fowler) <https://www.martinfowler.com/eaaCatalog/metadataMapping.html>`_ + + version id column + In SQLAlchemy, this refers to the use of a particular table column that + tracks the "version" of a particular row, as the row changes values. While + there are different kinds of relational patterns that make use of a + "version id column" in different ways, SQLAlchemy's ORM includes a particular + feature that allows for such a column to be configured as a means of + testing for stale data when a row is being UPDATEd with new information. + If the last known "version" of this column does not match that of the + row when we try to put new data into the row, we know that we are + acting on stale information. + + There are also other ways of storing "versioned" rows in a database, + often referred to as "temporal" data. In addition to SQLAlchemy's + versioning feature, a few more examples are also present in the + documentation; see the links below. + + .. seealso:: + + :ref:`mapper_version_counter` - SQLAlchemy's built-in version id feature. + + :ref:`examples_versioning` - other examples of mappings that version rows + temporally. + + registry + An object, typically globally accessible, that contains long-lived + information about some program state that is generally useful to many + parts of a program. + + .. seealso:: + + `Registry (via Martin Fowler) <https://martinfowler.com/eaaCatalog/registry.html>`_ + + cascade + A term used in SQLAlchemy to describe how an ORM persistence action that + takes place on a particular object would extend into other objects + which are directly associated with that object. In SQLAlchemy, these + object associations are configured using the :func:`.relationship` + construct.
:func:`.relationship` contains a parameter called + :paramref:`.relationship.cascade` which provides options on how certain + persistence operations may cascade. + + The term "cascades" as well as the general architecture of this system + in SQLAlchemy was borrowed, for better or worse, from the Hibernate + ORM. + + .. seealso:: + + :ref:`unitofwork_cascades` + + dialect + In SQLAlchemy, the "dialect" is a Python object that represents information + and methods that allow database operations to proceed on a particular + kind of database backend and a particular kind of Python driver (or + :term:`DBAPI`) for that database. SQLAlchemy dialects are subclasses + of the :class:`.Dialect` class. + + .. seealso:: + + :ref:`engines_toplevel` + discriminator A result-set column which is used during :term:`polymorphic` loading to determine what kind of mapped class should be applied to a particular @@ -159,7 +251,16 @@ .. seealso:: - Martin Fowler - Identity Map - http://martinfowler.com/eaaCatalog/identityMap.html + `Identity Map (via Martin Fowler) <http://martinfowler.com/eaaCatalog/identityMap.html>`_ + + lazy initialization + A tactic of delaying some initialization action, such as creating objects, + populating data, or establishing connectivity to other services, until + those resources are required. + + .. seealso:: + + `Lazy initialization (via Wikipedia) <https://en.wikipedia.org/wiki/Lazy_initialization>`_ lazy load lazy loads @@ -174,16 +275,33 @@ the complexity and time spent within object fetches can sometimes be reduced, in that attributes for related tables don't need to be addressed - immediately. + immediately. Lazy loading is the opposite of :term:`eager loading`. .. seealso:: - `Lazy Load (on Martin Fowler) <http://martinfowler.com/eaaCatalog/lazyLoad.html>`_ + `Lazy Load (via Martin Fowler) <http://martinfowler.com/eaaCatalog/lazyLoad.html>`_ :term:`N plus one problem` :doc:`orm/loading_relationships` + eager load + eager loads + eager loaded + eager loading + + In object relational mapping, an "eager load" refers to + an attribute that is populated with its database-side value + at the same time as when the object itself is loaded from the database. + In SQLAlchemy, "eager loading" usually refers to related collections + of objects that are mapped using the :func:`.relationship` construct. + Eager loading is the opposite of :term:`lazy loading`. + + .. seealso:: + + :doc:`orm/loading_relationships` + + mapping mapped We say a class is "mapped" when it has been passed through the @@ -191,7 +309,7 @@ class with a database table or other :term:`selectable` construct, so that instances of it can be persisted using a :class:`.Session` as well as loaded using a - :class:`.Query`. + :class:`.query.Query`. N plus one problem The N plus one problem is a common side effect of the @@ -239,7 +357,7 @@ The two SQLAlchemy objects that make the most use of method chaining are the :class:`~.expression.Select` - object and the :class:`~.orm.query.Query` object. + object and the :class:`.orm.query.Query` object. For example, a :class:`~.expression.Select` object can be assigned two expressions to its WHERE clause as well as an ORDER BY clause by calling upon the :meth:`~.Select.where` @@ -325,7 +443,7 @@ .. 
seealso:: - `Domain Model (wikipedia) <http://en.wikipedia.org/wiki/Domain_model>`_ + `Domain Model (via Wikipedia) <http://en.wikipedia.org/wiki/Domain_model>`_ unit of work This pattern is where the system transparently keeps @@ -336,7 +454,7 @@ .. seealso:: - `Unit of Work by Martin Fowler <http://martinfowler.com/eaaCatalog/unitOfWork.html>`_ + `Unit of Work (via Martin Fowler) <http://martinfowler.com/eaaCatalog/unitOfWork.html>`_ :doc:`orm/session` @@ -530,7 +648,7 @@ :term:`durability` - http://en.wikipedia.org/wiki/ACID_Model + `ACID Model (via Wikipedia) <http://en.wikipedia.org/wiki/ACID_Model>`_ atomicity Atomicity is one of the components of the :term:`ACID` model, @@ -545,7 +663,7 @@ :term:`ACID` - http://en.wikipedia.org/wiki/Atomicity_(database_systems) + `Atomicity (via Wikipedia) <http://en.wikipedia.org/wiki/Atomicity_(database_systems)>`_ consistency Consistency is one of the components of the :term:`ACID` model, @@ -560,10 +678,11 @@ :term:`ACID` - http://en.wikipedia.org/wiki/Consistency_(database_systems) + `Consistency (via Wikipedia) <http://en.wikipedia.org/wiki/Consistency_(database_systems)>`_ isolation isolated + isolation level The isolation property of the :term:`ACID` model ensures that the concurrent execution of transactions results in a system state that would be @@ -577,7 +696,57 @@ :term:`ACID` - http://en.wikipedia.org/wiki/Isolation_(database_systems) + `Isolation (via Wikipedia) <http://en.wikipedia.org/wiki/Isolation_(database_systems)>`_ + + :term:`read uncommitted` + + :term:`read committed` + + :term:`repeatable read` + + :term:`serializable` + + repeatable read + One of the four database :term:`isolation` levels, repeatable read + features all of the isolation of :term:`read committed`, and + additionally features that any particular row that is read within a + transaction is guaranteed from that point to not have any subsequent + external changes in value (i.e. from other concurrent UPDATE + statements) for the duration of that transaction. + + read committed + One of the four database :term:`isolation` levels, read committed + features that the transaction will not be exposed to any data from + other concurrent transactions that has not been committed yet, + preventing so-called "dirty reads". However, under read committed + there can be non-repeatable reads, meaning data in a row may change + when read a second time if another transaction has committed changes. + + read uncommitted + One of the four database :term:`isolation` levels, read uncommitted + features that changes made to database data within a transaction will + not become permanent until the transaction is committed. However, + within read uncommitted, it may be possible for data that is not + committed in other transactions to be viewable within the scope of + another transaction; these are known as "dirty reads". + + serializable + One of the four database :term:`isolation` levels, serializable + features all of the isolation of :term:`repeatable read`, and + additionally within a lock-based approach guarantees that so-called + "phantom reads" cannot occur; this means that rows which are INSERTed + or DELETEd within the scope of other transactions will not be + detectable within this transaction. A row that is read within this + transaction is guaranteed to continue existing, and a row that does not + exist is guaranteed that it cannot appear or be inserted from another + transaction.
+ + Serializable isolation typically relies upon locking of rows or ranges + of rows in order to achieve this effect and can increase the chance of + deadlocks and degrade performance. There are also non-lock based + schemes; however, these necessarily rely upon rejecting transactions if + write collisions are detected. + durability Durability is a property of the :term:`ACID` model @@ -593,7 +762,7 @@ :term:`ACID` - http://en.wikipedia.org/wiki/Durability_(database_systems) + `Durability (via Wikipedia) <http://en.wikipedia.org/wiki/Durability_(database_systems)>`_ RETURNING This is a non-SQL standard clause provided in various forms by @@ -970,7 +1139,7 @@ :term:`primary key` - http://en.wikipedia.org/wiki/Candidate_key + `Candidate key (via Wikipedia) <http://en.wikipedia.org/wiki/Candidate_key>`_ https://www.databasestar.com/database-keys/ @@ -978,7 +1147,7 @@ primary key primary key constraint A :term:`constraint` that uniquely defines the characteristics - of each :term:`row`. The primary key has to consist of + of each row in a table. The primary key has to consist of characteristics that cannot be duplicated by any other row. The primary key may consist of a single attribute or multiple attributes in combination. @@ -998,7 +1167,7 @@ .. seealso:: - http://en.wikipedia.org/wiki/Primary_Key + `Primary key (via Wikipedia) <http://en.wikipedia.org/wiki/Primary_Key>`_ foreign key constraint A referential constraint between two tables. A foreign key is a field or set of fields in a @@ -1016,7 +1185,7 @@ .. seealso:: - http://en.wikipedia.org/wiki/Foreign_key_constraint + `Foreign Key Constraint (via Wikipedia) <http://en.wikipedia.org/wiki/Foreign_key_constraint>`_ check constraint @@ -1036,7 +1205,7 @@ .. seealso:: - http://en.wikipedia.org/wiki/Check_constraint + `CHECK constraint (via Wikipedia) <http://en.wikipedia.org/wiki/Check_constraint>`_ unique constraint unique key index @@ -1053,7 +1222,7 @@ .. seealso:: - http://en.wikipedia.org/wiki/Unique_key#Defining_unique_keys + `Unique key (via Wikipedia) <http://en.wikipedia.org/wiki/Unique_key#Defining_unique_keys>`_ transient This describes one of the major object states which diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst index d3a1f8d31..e8bb894fd 100644 --- a/doc/build/orm/examples.rst +++ b/doc/build/orm/examples.rst @@ -90,6 +90,8 @@ XML Persistence .. automodule:: examples.elementtree +.. _examples_versioning: + Versioning Objects ------------------ diff --git a/doc/build/orm/extensions/declarative/inheritance.rst b/doc/build/orm/extensions/declarative/inheritance.rst index f23410683..b98843816 100644 --- a/doc/build/orm/extensions/declarative/inheritance.rst +++ b/doc/build/orm/extensions/declarative/inheritance.rst @@ -248,4 +248,3 @@ on concrete inheritance for details. :ref:`concrete_inheritance` - :ref:`inheritance_concrete_helpers` diff --git a/doc/build/orm/join_conditions.rst b/doc/build/orm/join_conditions.rst index a317c6ecc..68a2949ff 100644 --- a/doc/build/orm/join_conditions.rst +++ b/doc/build/orm/join_conditions.rst @@ -700,6 +700,8 @@ complexity is kept within the middle. sometimes ways to make relationships like the above writable, this is generally complicated and error prone. +.. _relationship_non_primary_mapper: + .. 
_relationship_aliased_class: Relationship to Aliased Class diff --git a/doc/build/orm/mapping_api.rst b/doc/build/orm/mapping_api.rst index 99e0ba52d..250bd26a4 100644 --- a/doc/build/orm/mapping_api.rst +++ b/doc/build/orm/mapping_api.rst @@ -15,8 +15,8 @@ Class Mapping API .. autofunction:: sqlalchemy.orm.util.identity_key -.. autofunction:: sqlalchemy.orm.util.polymorphic_union +.. autofunction:: polymorphic_union -.. autoclass:: sqlalchemy.orm.mapper.Mapper +.. autoclass:: Mapper :members: diff --git a/doc/build/orm/nonstandard_mappings.rst b/doc/build/orm/nonstandard_mappings.rst index 7516d1b54..522359854 100644 --- a/doc/build/orm/nonstandard_mappings.rst +++ b/doc/build/orm/nonstandard_mappings.rst @@ -157,35 +157,21 @@ Multiple Mappers for One Class ============================== In modern SQLAlchemy, a particular class is mapped by only one so-called -**primary** mapper at a time. This mapper is involved in three main -areas of functionality: querying, persistence, and instrumentation of the -mapped class. The rationale of the primary mapper relates to the fact -that the :func:`.mapper` modifies the class itself, not only -persisting it towards a particular :class:`.Table`, but also :term:`instrumenting` -attributes upon the class which are structured specifically according to the -table metadata. It's not possible for more than one mapper -to be associated with a class in equal measure, since only one mapper can -actually instrument the class. - -However, there is a class of mapper known as the **non primary** mapper -which allows additional mappers to be associated with a class, but with -a limited scope of use. This scope typically applies to -being able to load rows from an alternate table or selectable unit, but -still producing classes which are ultimately persisted using the primary -mapping. The non-primary mapper is created using the classical style -of mapping against a class that is already mapped with a primary mapper, -and involves the use of the :paramref:`~sqlalchemy.orm.mapper.non_primary` -flag. - -The non primary mapper is of very limited use in modern SQLAlchemy, as the -task of being able to load classes from subqueries or other compound statements -can be now accomplished using the :class:`.Query` object directly. - -There is really only one use case for the non-primary mapper, which is that -we wish to build a :func:`.relationship` to such a mapper; this is useful -in the rare and advanced case that our relationship is attempting to join two -classes together using many tables and/or joins in between. An example of this -pattern is at :ref:`relationship_non_primary_mapper`. +**primary** mapper at a time. This mapper is involved in three main areas of +functionality: querying, persistence, and instrumentation of the mapped class. +The rationale of the primary mapper relates to the fact that the +:func:`.mapper` modifies the class itself, not only persisting it towards a +particular :class:`.Table`, but also :term:`instrumenting` attributes upon the +class which are structured specifically according to the table metadata. It's +not possible for more than one mapper to be associated with a class in equal +measure, since only one mapper can actually instrument the class. + +The concept of a "non-primary" mapper had existed for many versions of +SQLAlchemy however as of version 1.3 this feature is deprecated. The +one case where such a non-primary mapper is useful is when constructing +a relationship to a class against an alternative selectable. 
This +use case is now suited using the :class:`.aliased` construct and is described +at :ref:`relationship_aliased_class`. As far as the use case of a class that can actually be fully persisted to different tables under different scenarios, very early versions of diff --git a/doc/build/orm/session_state_management.rst b/doc/build/orm/session_state_management.rst index 2730bb8b2..8b4d7a572 100644 --- a/doc/build/orm/session_state_management.rst +++ b/doc/build/orm/session_state_management.rst @@ -543,8 +543,8 @@ or loaded with :meth:`~.Session.refresh` varies based on several factors, includ expired column-based attributes being accessed. * Regarding relationships, :meth:`~.Session.refresh` is more restrictive than - :meth:`~.Session.expire` with regards to attributes that aren't column-mapped. - Calling :meth:`.refresh` and passing a list of names that only includes + :meth:`.Session.expire` with regards to attributes that aren't column-mapped. + Calling :meth:`.Session.refresh` and passing a list of names that only includes relationship-mapped attributes will actually raise an error. In any case, non-eager-loading :func:`.relationship` attributes will not be included in any refresh operation. @@ -620,7 +620,7 @@ The second bullet has the important caveat that "it is also known that the isola allow this data to be visible." This means that it cannot be assumed that an UPDATE that happened on another database connection will yet be visible here locally; in many cases, it will not. This is why if one wishes to use -:meth:`.expire` or :meth:`.refresh` in order to view data between ongoing +:meth:`.Session.expire` or :meth:`.Session.refresh` in order to view data between ongoing transactions, an understanding of the isolation behavior in effect is essential. .. seealso:: diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst index 0c9771e02..19fdd09bc 100644 --- a/doc/build/orm/tutorial.rst +++ b/doc/build/orm/tutorial.rst @@ -1651,7 +1651,7 @@ was emitted. If you want to reduce the number of queries (dramatically, in many we can apply an :term:`eager load` to the query operation. SQLAlchemy offers three types of eager loading, two of which are automatic, and a third which involves custom criterion. All three are usually invoked via functions known -as :term:`query options` which give additional instructions to the :class:`.Query` on how +as query options which give additional instructions to the :class:`.Query` on how we would like various attributes to be loaded, via the :meth:`.Query.options` method. 
Selectin Load diff --git a/doc/build/replacments/fix_xref_state.txt b/doc/build/replacments/fix_xref_state.txt new file mode 100644 index 000000000..380657df2 --- /dev/null +++ b/doc/build/replacments/fix_xref_state.txt @@ -0,0 +1,93 @@ +.MetaData _schema.MetaData +.ForeignKey _schema.ForeignKey +.ForeignKeyConstraint _schema.ForeignKeyConstraint +.PoolEvents _events.PoolEvents +.DisconnectionError _exc.DisconnectionError +.SADeprecationWarning _exc.SADeprecationWarning +.Engine _engine.Engine +.Pool _pool.Pool +.future _future +.inspect _sa.inspect +.Inspector _reflection.Inspector +.orm _orm +.Mapper _orm.Mapper +.engine _engine +.JSON _types.JSON +.postgresql _postgresql +.types _types +._mysql _mysql +.sqlite _sqlite +.array_agg _functions.array_agg +.TIMESTAMP _types.TIMESTAMP +.JSONB _postgresql.JSONB +.ARRAY _types.ARRAY +.mssql _mssql +.sqltypes _types +.functions _functions +.INTERVAL _postgresql.INTERVAL +.INTERVAL _oracle.INTERVAL +.oracle _oracle +.NCHAR _types.NCHAR +.Query _query.Query +.relationship _orm.relationship +.FromClause _expression.FromClause +.join _expression.join +.SelectBase _expression.SelectBase +.Load _orm.Load +.joinedload _orm.joinedload +.sql _expression +.sql.expression _expression +.INTEGER _types.INTEGER +.VARBINARY _types.VARBINARY +.joinedload_all _orm.joinedload_all +.Insert _expression.Insert +.Update _expression.Update +.Delete _expression.Delete +.insert _expression.insert +.update _expression.update +.delete _expression.delete +.select _expression.select +.expression _expression +.future _future.Subquery _expression.Subquery +.Select _expression.Select +.ReturnsRows _expression.ReturnsRows +.ColumnCollection _expression.ColumnCollection +.ColumnElement _expression.ColumnElement +.Selectable expression.Selectable +.Lateral _expression.Lateral +.HasPrefixes _expression.HasPrefixes +.prefix_with _expression.HasPrefixes.prefix_with +.ClauseElement _expression.ClauseElement +.HasSuffixes _expression.HasSuffixes +.suffix_with _expression.HasSuffixes.suffix_with +.Table _schema.Table +.Join _expression.Join +.Alias _expression.Alias +.TableSample _expression.TableSample +.CTE _expression.CTE +.HasCte _expression.HasCTE +.HasCTE _expression.HasCTE +.CompoundSelect _selectable.CompoundSelect +.TextualSelect _expression.TextualSelect +.TableClause _expression.TableClause +.schema _schema +.Values _expression.Values +.column _expression.column +.GenerativeSelect _expression.GenerativeSelect +.Column _schema.Column +.union _expression.union +.union_all _expression.union_all +.intersect _expression.intersect +.intersect_all _expression.intersect_all +.except _expression.except +.except_all _expression.except_all +.Text _expression.TextClause +.text _expression.text +.literal_column _expression.literal_column +.Connection _engine.Connection +.Engine _engine.Engine +.apply_labels _expression.Select.apply_labels +.BooleanClauseList _expression.BooleanClauseList +.ScalarSelect _expression.ScalarSelect +.Exists _expression.Exists +.TextClause _expression.TextClause diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index a7086259b..43f3aeb04 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -391,9 +391,10 @@ behavior of this flag is as follows: * Complete control over whether the "old" or "new" types are rendered is available in all SQLAlchemy versions by using the UPPERCASE type objects - instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`, - 
:class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always - remain fixed and always output exactly that type. + instead: :class:`.types.NVARCHAR`, :class:`.types.VARCHAR`, + :class:`.types.VARBINARY`, :class:`.types.TEXT`, :class:`.mssql.NTEXT`, + :class:`.mssql.IMAGE` will always remain fixed and always output exactly that + type. .. versionadded:: 1.0.0 diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index cf521f06f..89a63fd47 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -140,6 +140,8 @@ The following DBAPI-specific options are respected when used with .. versionchanged:: 1.4 The ``max_row_buffer`` size can now be greater than 1000, and the buffer will grow to that size. +.. _psycopg2_batch_mode: + .. _psycopg2_executemany_mode: Psycopg2 Fast Execution Helpers diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 807f9488d..72bbd0177 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -325,7 +325,7 @@ ourselves. This is achieved using two event listeners:: conn.exec_driver_sql("BEGIN") .. warning:: When using the above recipe, it is advised to not use the - :paramref:`.execution_options.isolation_level` setting on + :paramref:`.Connection.execution_options.isolation_level` setting on :class:`.Connection` and :func:`.create_engine` with the SQLite driver, as this function necessarily will also alter the ".isolation_level" setting. diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py index 3d50b0828..2831f5e7d 100644 --- a/lib/sqlalchemy/engine/create.py +++ b/lib/sqlalchemy/engine/create.py @@ -610,7 +610,7 @@ def engine_from_config(configuration, prefix="sqlalchemy.", **kwargs): :param configuration: A dictionary (typically produced from a config file, but this is not a requirement). Items whose keys start with the value of 'prefix' will have that prefix stripped, and will then be passed to - :ref:`create_engine`. + :func:`.create_engine`. :param prefix: Prefix to match and then strip from keys in 'configuration'. diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index ba998aff0..be44f67e7 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1551,7 +1551,7 @@ class ResultProxy(BaseResult): yield row def close(self): - """Close this ResultProxy. + """Close this :class:`.ResultProxy`. This closes out the underlying DBAPI cursor corresponding to the statement execution, if one is still present. Note that the @@ -1567,7 +1567,7 @@ class ResultProxy(BaseResult): .. deprecated:: 2.0 "connectionless" execution is deprecated and will be removed in version 2.0. Version 2.0 will feature the - :class:`.Result` object that will no longer affect the status + :class:`.future.Result` object that will no longer affect the status of the originating connection in any case. 
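As a minimal usage sketch of the recommended pattern, assuming a throwaway in-memory SQLite engine purely for illustration::

    from sqlalchemy import create_engine
    from sqlalchemy.sql import text

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        result = conn.execute(text("SELECT 1"))
        rows = result.fetchall()  # exhausting the rows soft-closes the cursor
        result.close()  # explicit close; safe to call after a soft close

Here the connection's lifespan is governed by the ``with`` block, so the deprecated "connectionless" autoclose behavior described above is never relied upon.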
After this method is called, it is no longer valid to call upon diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index f00b642db..599bf966d 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -81,7 +81,7 @@ def association_proxy(target_collection, attr, **kw): ASSOCIATION_PROXY = util.symbol("ASSOCIATION_PROXY") -"""Symbol indicating an :class:`InspectionAttr` that's +"""Symbol indicating an :class:`.InspectionAttr` that's of type :class:`.AssociationProxy`. Is assigned to the :attr:`.InspectionAttr.extension_type` diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index b1574339d..825c1d3f3 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -442,8 +442,6 @@ class ConcreteBase(object): :ref:`concrete_inheritance` - :ref:`inheritance_concrete_helpers` - """ @@ -582,8 +580,6 @@ class AbstractConcreteBase(ConcreteBase): :ref:`concrete_inheritance` - :ref:`inheritance_concrete_helpers` - """ __no_table__ = True diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 029a28c68..53118c573 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -239,7 +239,7 @@ defaultload = strategy_options.defaultload._unbound_fn selectin_polymorphic = strategy_options.selectin_polymorphic._unbound_fn -@_sa_util.deprecated_20("relation", "Please use :func:`joinedload`.") +@_sa_util.deprecated_20("eagerload", "Please use :func:`_orm.joinedload`.") def eagerload(*args, **kwargs): """A synonym for :func:`joinedload()`.""" return joinedload(*args, **kwargs) diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 2bacb25b0..82979b188 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -446,10 +446,10 @@ class Event(object): .. versionadded:: 0.9.0 - :var impl: The :class:`.AttributeImpl` which is the current event + :attribute impl: The :class:`.AttributeImpl` which is the current event initiator. - :var op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE`, + :attribute op: The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE`, :attr:`.OP_REPLACE`, or :attr:`.OP_BULK_REPLACE`, indicating the source operation. diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py index 1fe51514e..6475f79de 100644 --- a/lib/sqlalchemy/orm/strategy_options.py +++ b/lib/sqlalchemy/orm/strategy_options.py @@ -982,9 +982,9 @@ class loader_option(object): self._unbound_fn = fn fn_doc = self.fn.__doc__ self.fn.__doc__ = """Produce a new :class:`.Load` object with the -:func:`.orm.%(name)s` option applied. +:func:`_orm.%(name)s` option applied. -See :func:`.orm.%(name)s` for usage examples. +See :func:`_orm.%(name)s` for usage examples. """ % { "name": self.name @@ -994,13 +994,14 @@ See :func:`.orm.%(name)s` for usage examples. return self def _add_unbound_all_fn(self, fn): - fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`. + fn.__doc__ = """Produce a standalone "all" option for +:func:`_orm.%(name)s`. .. deprecated:: 0.9 - The :func:`.%(name)s_all` function is deprecated, and will be removed - in a future release. Please use method chaining with :func:`.%(name)s` - instead, as in:: + The :func:`_orm.%(name)s_all` function is deprecated, and will be removed + in a future release. 
Please use method chaining with + :func:`_orm.%(name)s` instead, as in:: session.query(MyClass).options( %(name)s("someattribute").%(name)s("anotherattribute") @@ -1751,7 +1752,7 @@ def selectin_polymorphic(loadopt, classes): .. seealso:: - :ref:`inheritance_polymorphic_load` + :ref:`polymorphic_selectin` """ loadopt.set_class_strategy( diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index cbcf54d1c..1ac3acd8a 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -200,47 +200,47 @@ class UpdateBase( param_to_method_lookup = dict( whereclause=( - "The :paramref:`.%(func)s.whereclause` parameter " + "The :paramref:`%(func)s.whereclause` parameter " "will be removed " "in SQLAlchemy 2.0. Please refer to the " ":meth:`.%(classname)s.where` method." ), values=( - "The :paramref:`.%(func)s.values` parameter will be removed " + "The :paramref:`%(func)s.values` parameter will be removed " "in SQLAlchemy 2.0. Please refer to the " - ":meth:`.%(classname)s.values` method." + ":meth:`%(classname)s.values` method." ), bind=( - "The :paramref:`.%(func)s.bind` parameter will be removed in " + "The :paramref:`%(func)s.bind` parameter will be removed in " "SQLAlchemy 2.0. Please use explicit connection execution." ), inline=( - "The :paramref:`.%(func)s.inline` parameter will be " + "The :paramref:`%(func)s.inline` parameter will be " "removed in " "SQLAlchemy 2.0. Please use the " - ":meth:`.%(classname)s.inline` method." + ":meth:`%(classname)s.inline` method." ), prefixes=( - "The :paramref:`.%(func)s.prefixes parameter will be " + "The :paramref:`%(func)s.prefixes parameter will be " "removed in " "SQLAlchemy 2.0. Please use the " - ":meth:`.%(classname)s.prefix_with` " + ":meth:`%(classname)s.prefix_with` " "method." ), return_defaults=( - "The :paramref:`.%(func)s.return_defaults` parameter will be " + "The :paramref:`%(func)s.return_defaults` parameter will be " "removed in SQLAlchemy 2.0. Please use the " - ":meth:`.%(classname)s.return_defaults` method." + ":meth:`%(classname)s.return_defaults` method." ), returning=( - "The :paramref:`.%(func)s.returning` parameter will be " + "The :paramref:`%(func)s.returning` parameter will be " "removed in SQLAlchemy 2.0. Please use the " - ":meth:`.%(classname)s.returning`` method." + ":meth:`%(classname)s.returning`` method." ), preserve_parameter_order=( "The :paramref:`%(func)s.preserve_parameter_order` parameter " "will be removed in SQLAlchemy 2.0. Use the " - ":meth:`.%(classname)s.ordered_values` method with a list " + ":meth:`%(classname)s.ordered_values` method with a list " "of tuples. " ), ) @@ -250,7 +250,10 @@ class UpdateBase( name: ( "2.0", param_to_method_lookup[name] - % {"func": fn_name, "classname": clsname}, + % { + "func": "_expression.%s" % fn_name, + "classname": "_expression.%s" % clsname, + }, ) for name in names } @@ -546,25 +549,13 @@ class ValuesBase(UpdateBase): callable is invoked for each row. See :ref:`bug_3288` for other details. - The :class:`.Update` construct supports a special form which is a - list of 2-tuples, which when provided must be passed in conjunction - with the - :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` - parameter. - This form causes the UPDATE statement to render the SET clauses - using the order of parameters given to :meth:`.Update.values`, rather - than the ordering of columns given in the :class:`.Table`. - - .. 
versionadded:: 1.0.10 - added support for parameter-ordered - UPDATE statements via the - :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` - flag. + The UPDATE construct also supports rendering the SET parameters + in a specific order. For this feature refer to the + :meth:`.Update.ordered_values` method. .. seealso:: - :ref:`updates_order_parameters` - full example of the - :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` - flag + :meth:`.Update.ordered_values` .. seealso:: @@ -1064,8 +1055,8 @@ class Update(DMLWhereBase, ValuesBase): .. seealso:: - :ref:`updates_order_parameters` - full example of the - :paramref:`~.update.preserve_parameter_order` flag + :ref:`updates_order_parameters` - illustrates the + :meth:`.Update.ordered_values` method. If both ``values`` and compile-time bind parameters are present, the compile-time bind parameters override the information specified @@ -1089,7 +1080,8 @@ class Update(DMLWhereBase, ValuesBase): etc. when combining :func:`~.sql.expression.select` constructs within the - values clause of an :func:`.update` construct, the subquery represented + values clause of an :func:`.update` + construct, the subquery represented by the :func:`~.sql.expression.select` should be *correlated* to the parent table, that is, providing criterion which links the table inside the subquery to the outer table being updated:: @@ -1135,8 +1127,7 @@ class Update(DMLWhereBase, ValuesBase): .. seealso:: :ref:`updates_order_parameters` - full example of the - :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` - flag + :meth:`.Update.ordered_values` method. .. versionchanged:: 1.4 The :meth:`.Update.ordered_values` method supersedes the :paramref:`.update.preserve_parameter_order` @@ -1219,7 +1210,7 @@ class Delete(DMLWhereBase, UpdateBase): prefixes=None, **dialect_kw ): - """Construct :class:`.Delete` object. + r"""Construct :class:`.Delete` object. Similar functionality is available via the :meth:`~.TableClause.delete` method on diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index d65821c5c..ac3fb9607 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -1382,6 +1382,8 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause): kwarg.append("default") if self.server_default: kwarg.append("server_default") + if self.comment: + kwarg.append("comment") return "Column(%s)" % ", ".join( [repr(self.name)] + [repr(self.type)] diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index 9c593ea5d..08a237636 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -3424,7 +3424,7 @@ class Select( All arguments which accept :class:`.ClauseElement` arguments also accept string arguments, which will be converted as appropriate into - either :func:`text()` or :func:`literal_column()` constructs. + either :func:`.text()` or :func:`.literal_column()` constructs. .. 
seealso:: diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index fae68da98..ca5bde091 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -249,7 +249,9 @@ def find_tables( _visitors["join"] = tables.append if include_aliases: - _visitors["alias"] = tables.append + _visitors["alias"] = _visitors["subquery"] = _visitors[ + "tablesample" + ] = _visitors["lateral"] = tables.append if include_crud: _visitors["insert"] = _visitors["update"] = _visitors[ diff --git a/lib/sqlalchemy/testing/suite/test_cte.py b/lib/sqlalchemy/testing/suite/test_cte.py index c7e6a266c..fab457606 100644 --- a/lib/sqlalchemy/testing/suite/test_cte.py +++ b/lib/sqlalchemy/testing/suite/test_cte.py @@ -37,16 +37,17 @@ class CTETest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "data": "d1", "parent_id": None}, - {"id": 2, "data": "d2", "parent_id": 1}, - {"id": 3, "data": "d3", "parent_id": 1}, - {"id": 4, "data": "d4", "parent_id": 3}, - {"id": 5, "data": "d5", "parent_id": 3}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "data": "d1", "parent_id": None}, + {"id": 2, "data": "d2", "parent_id": 1}, + {"id": 3, "data": "d3", "parent_id": 1}, + {"id": 4, "data": "d4", "parent_id": 3}, + {"id": 5, "data": "d5", "parent_id": 3}, + ], + ) def test_select_nonrecursive_round_trip(self): some_table = self.tables.some_table diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 81a55e18a..1f49106fb 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -67,25 +67,27 @@ class TableDDLTest(fixtures.TestBase): @requirements.comment_reflection @util.provide_metadata - def test_add_table_comment(self): + def test_add_table_comment(self, connection): table = self._simple_fixture() - table.create(config.db, checkfirst=False) + table.create(connection, checkfirst=False) table.comment = "a comment" - config.db.execute(schema.SetTableComment(table)) + connection.execute(schema.SetTableComment(table)) eq_( - inspect(config.db).get_table_comment("test_table"), + inspect(connection).get_table_comment("test_table"), {"text": "a comment"}, ) @requirements.comment_reflection @util.provide_metadata - def test_drop_table_comment(self): + def test_drop_table_comment(self, connection): table = self._simple_fixture() - table.create(config.db, checkfirst=False) + table.create(connection, checkfirst=False) table.comment = "a comment" - config.db.execute(schema.SetTableComment(table)) - config.db.execute(schema.DropTableComment(table)) - eq_(inspect(config.db).get_table_comment("test_table"), {"text": None}) + connection.execute(schema.SetTableComment(table)) + connection.execute(schema.DropTableComment(table)) + eq_( + inspect(connection).get_table_comment("test_table"), {"text": None} + ) __all__ = ("TableDDLTest",) diff --git a/lib/sqlalchemy/testing/suite/test_deprecations.py b/lib/sqlalchemy/testing/suite/test_deprecations.py index d0202a0a9..126d82fe9 100644 --- a/lib/sqlalchemy/testing/suite/test_deprecations.py +++ b/lib/sqlalchemy/testing/suite/test_deprecations.py @@ -24,20 +24,21 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2}, - {"id": 2, "x": 2, "y": 3}, - {"id": 3, "x": 3, "y": 4}, - {"id": 4, "x": 4, "y": 5}, - ], - ) - - 
def _assert_result(self, select, result, params=()): - eq_(config.db.execute(select, params).fetchall(), result) - - def test_plain_union(self): + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2}, + {"id": 2, "x": 2, "y": 3}, + {"id": 3, "x": 3, "y": 4}, + {"id": 4, "x": 4, "y": 5}, + ], + ) + + def _assert_result(self, conn, select, result, params=()): + eq_(conn.execute(select, params).fetchall(), result) + + def test_plain_union(self, connection): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2) s2 = select([table]).where(table.c.id == 3) @@ -47,7 +48,9 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) # note we've had to remove one use case entirely, which is this # one. the Select gets its FROMS from the WHERE clause and the @@ -56,7 +59,7 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): # ORDER BY without adding the SELECT into the FROM and breaking the # query. Users will have to adjust for this use case if they were doing # it before. - def _dont_test_select_from_plain_union(self): + def _dont_test_select_from_plain_union(self, connection): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2) s2 = select([table]).where(table.c.id == 3) @@ -66,11 +69,13 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) @testing.requires.order_by_col_from_union @testing.requires.parens_in_union_contained_select_w_limit_offset - def test_limit_offset_selectable_in_unions(self): + def test_limit_offset_selectable_in_unions(self, connection): table = self.tables.some_table s1 = ( select([table]) @@ -90,10 +95,12 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) @testing.requires.parens_in_union_contained_select_wo_limit_offset - def test_order_by_selectable_in_unions(self): + def test_order_by_selectable_in_unions(self, connection): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2).order_by(table.c.id) s2 = select([table]).where(table.c.id == 3).order_by(table.c.id) @@ -103,9 +110,11 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) - def test_distinct_selectable_in_unions(self): + def test_distinct_selectable_in_unions(self, connection): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2).distinct() s2 = select([table]).where(table.c.id == 3).distinct() @@ -115,9 +124,11 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + 
connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) - def test_limit_offset_aliased_selectable_in_unions(self): + def test_limit_offset_aliased_selectable_in_unions(self, connection): table = self.tables.some_table s1 = ( select([table]) @@ -141,4 +152,6 @@ class DeprecatedCompoundSelectTest(fixtures.TablesTest): "The SelectBase.c and SelectBase.columns " "attributes are deprecated" ): - self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) + self._assert_result( + connection, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] + ) diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 931b0ef65..f449b2fe6 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -41,28 +41,28 @@ class LastrowidTest(fixtures.TablesTest): def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() - eq_(row, (config.db.dialect.default_sequence_base, "some data")) + eq_(row, (conn.engine.dialect.default_sequence_base, "some data")) - def test_autoincrement_on_insert(self): + def test_autoincrement_on_insert(self, connection): - config.db.execute(self.tables.autoinc_pk.insert(), data="some data") - self._assert_round_trip(self.tables.autoinc_pk, config.db) + connection.execute(self.tables.autoinc_pk.insert(), data="some data") + self._assert_round_trip(self.tables.autoinc_pk, connection) - def test_last_inserted_id(self): + def test_last_inserted_id(self, connection): - r = config.db.execute( + r = connection.execute( self.tables.autoinc_pk.insert(), data="some data" ) - pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + pk = connection.scalar(select([self.tables.autoinc_pk.c.id])) eq_(r.inserted_primary_key, [pk]) @requirements.dbapi_lastrowid - def test_native_lastrowid_autoinc(self): - r = config.db.execute( + def test_native_lastrowid_autoinc(self, connection): + r = connection.execute( self.tables.autoinc_pk.insert(), data="some data" ) lastrowid = r.lastrowid - pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + pk = connection.scalar(select([self.tables.autoinc_pk.c.id])) eq_(lastrowid, pk) @@ -117,8 +117,8 @@ class InsertBehaviorTest(fixtures.TablesTest): assert not r.returns_rows @requirements.returning - def test_autoclose_on_insert_implicit_returning(self): - r = config.db.execute( + def test_autoclose_on_insert_implicit_returning(self, connection): + r = connection.execute( self.tables.autoinc_pk.insert(), data="some data" ) assert r._soft_closed @@ -127,12 +127,12 @@ class InsertBehaviorTest(fixtures.TablesTest): assert not r.returns_rows @requirements.empty_inserts - def test_empty_insert(self): - r = config.db.execute(self.tables.autoinc_pk.insert()) + def test_empty_insert(self, connection): + r = connection.execute(self.tables.autoinc_pk.insert()) assert r._soft_closed assert not r.closed - r = config.db.execute( + r = connection.execute( self.tables.autoinc_pk.select().where( self.tables.autoinc_pk.c.id != None ) @@ -141,10 +141,10 @@ class InsertBehaviorTest(fixtures.TablesTest): assert len(r.fetchall()) @requirements.insert_from_select - def test_insert_from_select_autoinc(self): + def test_insert_from_select_autoinc(self, connection): src_table = self.tables.manual_pk dest_table = self.tables.autoinc_pk - config.db.execute( + connection.execute( src_table.insert(), [ dict(id=1, data="data1"), @@ -153,7 +153,7 @@ class InsertBehaviorTest(fixtures.TablesTest): ], ) - result = config.db.execute( + result = connection.execute( 
dest_table.insert().from_select( ("data",), select([src_table.c.data]).where( @@ -164,17 +164,17 @@ class InsertBehaviorTest(fixtures.TablesTest): eq_(result.inserted_primary_key, [None]) - result = config.db.execute( + result = connection.execute( select([dest_table.c.data]).order_by(dest_table.c.data) ) eq_(result.fetchall(), [("data2",), ("data3",)]) @requirements.insert_from_select - def test_insert_from_select_autoinc_no_rows(self): + def test_insert_from_select_autoinc_no_rows(self, connection): src_table = self.tables.manual_pk dest_table = self.tables.autoinc_pk - result = config.db.execute( + result = connection.execute( dest_table.insert().from_select( ("data",), select([src_table.c.data]).where( @@ -184,16 +184,16 @@ class InsertBehaviorTest(fixtures.TablesTest): ) eq_(result.inserted_primary_key, [None]) - result = config.db.execute( + result = connection.execute( select([dest_table.c.data]).order_by(dest_table.c.data) ) eq_(result.fetchall(), []) @requirements.insert_from_select - def test_insert_from_select(self): + def test_insert_from_select(self, connection): table = self.tables.manual_pk - config.db.execute( + connection.execute( table.insert(), [ dict(id=1, data="data1"), @@ -202,7 +202,7 @@ class InsertBehaviorTest(fixtures.TablesTest): ], ) - config.db.execute( + connection.execute( table.insert(inline=True).from_select( ("id", "data"), select([table.c.id + 5, table.c.data]).where( @@ -212,16 +212,16 @@ class InsertBehaviorTest(fixtures.TablesTest): ) eq_( - config.db.execute( + connection.execute( select([table.c.data]).order_by(table.c.data) ).fetchall(), [("data1",), ("data2",), ("data2",), ("data3",), ("data3",)], ) @requirements.insert_from_select - def test_insert_from_select_with_defaults(self): + def test_insert_from_select_with_defaults(self, connection): table = self.tables.includes_defaults - config.db.execute( + connection.execute( table.insert(), [ dict(id=1, data="data1"), @@ -230,7 +230,7 @@ class InsertBehaviorTest(fixtures.TablesTest): ], ) - config.db.execute( + connection.execute( table.insert(inline=True).from_select( ("id", "data"), select([table.c.id + 5, table.c.data]).where( @@ -240,7 +240,7 @@ class InsertBehaviorTest(fixtures.TablesTest): ) eq_( - config.db.execute( + connection.execute( select([table]).order_by(table.c.data, table.c.id) ).fetchall(), [ @@ -262,7 +262,7 @@ class ReturningTest(fixtures.TablesTest): def _assert_round_trip(self, table, conn): row = conn.execute(table.select()).first() - eq_(row, (config.db.dialect.default_sequence_base, "some data")) + eq_(row, (conn.engine.dialect.default_sequence_base, "some data")) @classmethod def define_tables(cls, metadata): @@ -276,39 +276,35 @@ class ReturningTest(fixtures.TablesTest): ) @requirements.fetch_rows_post_commit - def test_explicit_returning_pk_autocommit(self): - engine = config.db + def test_explicit_returning_pk_autocommit(self, connection): table = self.tables.autoinc_pk - with engine.begin() as conn: - r = conn.execute( - table.insert().returning(table.c.id), data="some data" - ) + r = connection.execute( + table.insert().returning(table.c.id), data="some data" + ) pk = r.first()[0] - fetched_pk = config.db.scalar(select([table.c.id])) + fetched_pk = connection.scalar(select([table.c.id])) eq_(fetched_pk, pk) - def test_explicit_returning_pk_no_autocommit(self): - engine = config.db + def test_explicit_returning_pk_no_autocommit(self, connection): table = self.tables.autoinc_pk - with engine.begin() as conn: - r = conn.execute( - table.insert().returning(table.c.id), 
data="some data" - ) - pk = r.first()[0] - fetched_pk = config.db.scalar(select([table.c.id])) + r = connection.execute( + table.insert().returning(table.c.id), data="some data" + ) + pk = r.first()[0] + fetched_pk = connection.scalar(select([table.c.id])) eq_(fetched_pk, pk) - def test_autoincrement_on_insert_implicit_returning(self): + def test_autoincrement_on_insert_implicit_returning(self, connection): - config.db.execute(self.tables.autoinc_pk.insert(), data="some data") - self._assert_round_trip(self.tables.autoinc_pk, config.db) + connection.execute(self.tables.autoinc_pk.insert(), data="some data") + self._assert_round_trip(self.tables.autoinc_pk, connection) - def test_last_inserted_id_implicit_returning(self): + def test_last_inserted_id_implicit_returning(self, connection): - r = config.db.execute( + r = connection.execute( self.tables.autoinc_pk.insert(), data="some data" ) - pk = config.db.scalar(select([self.tables.autoinc_pk.c.id])) + pk = connection.scalar(select([self.tables.autoinc_pk.c.id])) eq_(r.inserted_primary_key, [pk]) diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index 2c2317ff7..23f78961f 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -42,16 +42,18 @@ class CollateTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "data": "collate data1"}, - {"id": 2, "data": "collate data2"}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "data": "collate data1"}, + {"id": 2, "data": "collate data2"}, + ], + ) def _assert_result(self, select, result): - eq_(config.db.execute(select).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select).fetchall(), result) @testing.requires.order_by_collation def test_collate_order_by(self): @@ -90,17 +92,19 @@ class OrderByLabelTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2, "q": "q1", "p": "p3"}, - {"id": 2, "x": 2, "y": 3, "q": "q2", "p": "p2"}, - {"id": 3, "x": 3, "y": 4, "q": "q3", "p": "p1"}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2, "q": "q1", "p": "p3"}, + {"id": 2, "x": 2, "y": 3, "q": "q2", "p": "p2"}, + {"id": 3, "x": 3, "y": 4, "q": "q3", "p": "p1"}, + ], + ) def _assert_result(self, select, result): - eq_(config.db.execute(select).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select).fetchall(), result) def test_plain(self): table = self.tables.some_table @@ -162,18 +166,20 @@ class LimitOffsetTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2}, - {"id": 2, "x": 2, "y": 3}, - {"id": 3, "x": 3, "y": 4}, - {"id": 4, "x": 4, "y": 5}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2}, + {"id": 2, "x": 2, "y": 3}, + {"id": 3, "x": 3, "y": 4}, + {"id": 4, "x": 4, "y": 5}, + ], + ) def _assert_result(self, select, result, params=()): - eq_(config.db.execute(select, params).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select, params).fetchall(), result) def _assert_result_str(self, select, result, params=()): conn = 
config.db.connect(close_with_result=True) @@ -303,7 +309,8 @@ class JoinTest(fixtures.TablesTest): __backend__ = True def _assert_result(self, select, result, params=()): - eq_(config.db.execute(select, params).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select, params).fetchall(), result) @classmethod def define_tables(cls, metadata): @@ -317,20 +324,21 @@ class JoinTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.a.insert(), - [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.a.insert(), + [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}, {"id": 5}], + ) - config.db.execute( - cls.tables.b.insert(), - [ - {"id": 1, "a_id": 1}, - {"id": 2, "a_id": 1}, - {"id": 4, "a_id": 2}, - {"id": 5, "a_id": 3}, - ], - ) + conn.execute( + cls.tables.b.insert(), + [ + {"id": 1, "a_id": 1}, + {"id": 2, "a_id": 1}, + {"id": 4, "a_id": 2}, + {"id": 5, "a_id": 3}, + ], + ) def test_inner_join_fk(self): a, b = self.tables("a", "b") @@ -413,18 +421,20 @@ class CompoundSelectTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2}, - {"id": 2, "x": 2, "y": 3}, - {"id": 3, "x": 3, "y": 4}, - {"id": 4, "x": 4, "y": 5}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2}, + {"id": 2, "x": 2, "y": 3}, + {"id": 3, "x": 3, "y": 4}, + {"id": 4, "x": 4, "y": 5}, + ], + ) def _assert_result(self, select, result, params=()): - eq_(config.db.execute(select, params).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select, params).fetchall(), result) def test_plain_union(self): table = self.tables.some_table @@ -556,15 +566,16 @@ class PostCompileParamsTest( @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2, "z": "z1"}, - {"id": 2, "x": 2, "y": 3, "z": "z2"}, - {"id": 3, "x": 3, "y": 4, "z": "z3"}, - {"id": 4, "x": 4, "y": 5, "z": "z4"}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2, "z": "z1"}, + {"id": 2, "x": 2, "y": 3, "z": "z2"}, + {"id": 3, "x": 3, "y": 4, "z": "z3"}, + {"id": 4, "x": 4, "y": 5, "z": "z4"}, + ], + ) def test_compile(self): table = self.tables.some_table @@ -673,18 +684,20 @@ class ExpandingBoundInTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "x": 1, "y": 2, "z": "z1"}, - {"id": 2, "x": 2, "y": 3, "z": "z2"}, - {"id": 3, "x": 3, "y": 4, "z": "z3"}, - {"id": 4, "x": 4, "y": 5, "z": "z4"}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "x": 1, "y": 2, "z": "z1"}, + {"id": 2, "x": 2, "y": 3, "z": "z2"}, + {"id": 3, "x": 3, "y": 4, "z": "z3"}, + {"id": 4, "x": 4, "y": 5, "z": "z4"}, + ], + ) def _assert_result(self, select, result, params=()): - eq_(config.db.execute(select, params).fetchall(), result) + with config.db.connect() as conn: + eq_(conn.execute(select, params).fetchall(), result) def test_multiple_empty_sets(self): # test that any anonymous aliasing used by the dialect @@ -837,7 +850,7 @@ class ExpandingBoundInTest(fixtures.TablesTest): self._assert_result(stmt, [(1,), (2,), (3,), (4,)], params={"q": []}) - def test_null_in_empty_set_is_false(self): + def 
test_null_in_empty_set_is_false(self, connection): stmt = select( [ case( @@ -853,7 +866,7 @@ class ExpandingBoundInTest(fixtures.TablesTest): ) ] ) - in_(config.db.execute(stmt).fetchone()[0], (False, 0)) + in_(connection.execute(stmt).fetchone()[0], (False, 0)) class LikeFunctionsTest(fixtures.TablesTest): @@ -873,21 +886,22 @@ class LikeFunctionsTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.some_table.insert(), - [ - {"id": 1, "data": "abcdefg"}, - {"id": 2, "data": "ab/cdefg"}, - {"id": 3, "data": "ab%cdefg"}, - {"id": 4, "data": "ab_cdefg"}, - {"id": 5, "data": "abcde/fg"}, - {"id": 6, "data": "abcde%fg"}, - {"id": 7, "data": "ab#cdefg"}, - {"id": 8, "data": "ab9cdefg"}, - {"id": 9, "data": "abcde#fg"}, - {"id": 10, "data": "abcd9fg"}, - ], - ) + with config.db.connect() as conn: + conn.execute( + cls.tables.some_table.insert(), + [ + {"id": 1, "data": "abcdefg"}, + {"id": 2, "data": "ab/cdefg"}, + {"id": 3, "data": "ab%cdefg"}, + {"id": 4, "data": "ab_cdefg"}, + {"id": 5, "data": "abcde/fg"}, + {"id": 6, "data": "abcde%fg"}, + {"id": 7, "data": "ab#cdefg"}, + {"id": 8, "data": "ab9cdefg"}, + {"id": 9, "data": "abcde#fg"}, + {"id": 10, "data": "abcd9fg"}, + ], + ) def _test(self, expr, expected): some_table = self.tables.some_table @@ -1015,7 +1029,16 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest): __backend__ = True __requires__ = ("supports_is_distinct_from",) - @testing.provide_metadata + @classmethod + def define_tables(cls, metadata): + Table( + "is_distinct_test", + metadata, + Column("id", Integer, primary_key=True), + Column("col_a", Integer, nullable=True), + Column("col_b", Integer, nullable=True), + ) + @testing.combinations( ("both_int_different", 0, 1, 1), ("both_int_same", 1, 1, 0), @@ -1028,15 +1051,8 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest): def test_is_or_isnot_distinct_from( self, col_a_value, col_b_value, expected_row_count_for_is, connection ): - meta = self.metadata - tbl = Table( - "is_distinct_test", - meta, - Column("id", Integer, primary_key=True), - Column("col_a", Integer, nullable=True), - Column("col_b", Integer, nullable=True), - ) - meta.create_all() + tbl = self.tables.is_distinct_test + connection.execute( tbl.insert(), [{"id": 1, "col_a": col_a_value, "col_b": col_b_value}], diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py index 22ae7d43c..db5582c21 100644 --- a/lib/sqlalchemy/testing/suite/test_sequence.py +++ b/lib/sqlalchemy/testing/suite/test_sequence.py @@ -39,21 +39,21 @@ class SequenceTest(fixtures.TablesTest): Column("data", String(50)), ) - def test_insert_roundtrip(self): - config.db.execute(self.tables.seq_pk.insert(), data="some data") - self._assert_round_trip(self.tables.seq_pk, config.db) + def test_insert_roundtrip(self, connection): + connection.execute(self.tables.seq_pk.insert(), data="some data") + self._assert_round_trip(self.tables.seq_pk, connection) - def test_insert_lastrowid(self): - r = config.db.execute(self.tables.seq_pk.insert(), data="some data") + def test_insert_lastrowid(self, connection): + r = connection.execute(self.tables.seq_pk.insert(), data="some data") eq_(r.inserted_primary_key, [1]) - def test_nextval_direct(self): - r = config.db.execute(self.tables.seq_pk.c.id.default) + def test_nextval_direct(self, connection): + r = connection.execute(self.tables.seq_pk.c.id.default) eq_(r, 1) @requirements.sequences_optional - def test_optional_seq(self): - r = config.db.execute( + def 
test_optional_seq(self, connection): + r = connection.execute( self.tables.seq_opt_pk.insert(), data="some data" ) eq_(r.inserted_primary_key, [1]) @@ -67,7 +67,7 @@ class SequenceCompilerTest(testing.AssertsCompiledSQL, fixtures.TestBase): __requires__ = ("sequences",) __backend__ = True - def test_literal_binds_inline_compile(self): + def test_literal_binds_inline_compile(self, connection): table = Table( "x", MetaData(), @@ -77,14 +77,14 @@ class SequenceCompilerTest(testing.AssertsCompiledSQL, fixtures.TestBase): stmt = table.insert().values(q=5) - seq_nextval = testing.db.dialect.statement_compiler( - statement=None, dialect=testing.db.dialect + seq_nextval = connection.dialect.statement_compiler( + statement=None, dialect=connection.dialect ).visit_sequence(Sequence("y_seq")) self.assert_compile( stmt, "INSERT INTO x (y, q) VALUES (%s, 5)" % (seq_nextval,), literal_binds=True, - dialect=testing.db.dialect, + dialect=connection.dialect, ) @@ -92,65 +92,65 @@ class HasSequenceTest(fixtures.TestBase): __requires__ = ("sequences",) __backend__ = True - def test_has_sequence(self): + def test_has_sequence(self, connection): s1 = Sequence("user_id_seq") - testing.db.execute(schema.CreateSequence(s1)) + connection.execute(schema.CreateSequence(s1)) try: eq_( - testing.db.dialect.has_sequence(testing.db, "user_id_seq"), + connection.dialect.has_sequence(connection, "user_id_seq"), True, ) finally: - testing.db.execute(schema.DropSequence(s1)) + connection.execute(schema.DropSequence(s1)) @testing.requires.schemas - def test_has_sequence_schema(self): + def test_has_sequence_schema(self, connection): s1 = Sequence("user_id_seq", schema=config.test_schema) - testing.db.execute(schema.CreateSequence(s1)) + connection.execute(schema.CreateSequence(s1)) try: eq_( - testing.db.dialect.has_sequence( - testing.db, "user_id_seq", schema=config.test_schema + connection.dialect.has_sequence( + connection, "user_id_seq", schema=config.test_schema ), True, ) finally: - testing.db.execute(schema.DropSequence(s1)) + connection.execute(schema.DropSequence(s1)) - def test_has_sequence_neg(self): - eq_(testing.db.dialect.has_sequence(testing.db, "user_id_seq"), False) + def test_has_sequence_neg(self, connection): + eq_(connection.dialect.has_sequence(connection, "user_id_seq"), False) @testing.requires.schemas - def test_has_sequence_schemas_neg(self): + def test_has_sequence_schemas_neg(self, connection): eq_( - testing.db.dialect.has_sequence( - testing.db, "user_id_seq", schema=config.test_schema + connection.dialect.has_sequence( + connection, "user_id_seq", schema=config.test_schema ), False, ) @testing.requires.schemas - def test_has_sequence_default_not_in_remote(self): + def test_has_sequence_default_not_in_remote(self, connection): s1 = Sequence("user_id_seq") - testing.db.execute(schema.CreateSequence(s1)) + connection.execute(schema.CreateSequence(s1)) try: eq_( - testing.db.dialect.has_sequence( - testing.db, "user_id_seq", schema=config.test_schema + connection.dialect.has_sequence( + connection, "user_id_seq", schema=config.test_schema ), False, ) finally: - testing.db.execute(schema.DropSequence(s1)) + connection.execute(schema.DropSequence(s1)) @testing.requires.schemas - def test_has_sequence_remote_not_in_default(self): + def test_has_sequence_remote_not_in_default(self, connection): s1 = Sequence("user_id_seq", schema=config.test_schema) - testing.db.execute(schema.CreateSequence(s1)) + connection.execute(schema.CreateSequence(s1)) try: eq_( - testing.db.dialect.has_sequence(testing.db, 
"user_id_seq"), + connection.dialect.has_sequence(connection, "user_id_seq"), False, ) finally: - testing.db.execute(schema.DropSequence(s1)) + connection.execute(schema.DropSequence(s1)) diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 9dabdbd65..7719a3b3c 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -519,10 +519,10 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase): filter_=lambda n: n is not None and round(n, 5) or None, ) - def test_float_coerce_round_trip(self): + def test_float_coerce_round_trip(self, connection): expr = 15.7563 - val = testing.db.scalar(select([literal(expr)])) + val = connection.scalar(select([literal(expr)])) eq_(val, expr) # this does not work in MySQL, see #4036, however we choose not @@ -530,17 +530,17 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase): @testing.requires.implicit_decimal_binds @testing.emits_warning(r".*does \*not\* support Decimal objects natively") - def test_decimal_coerce_round_trip(self): + def test_decimal_coerce_round_trip(self, connection): expr = decimal.Decimal("15.7563") - val = testing.db.scalar(select([literal(expr)])) + val = connection.scalar(select([literal(expr)])) eq_(val, expr) @testing.emits_warning(r".*does \*not\* support Decimal objects natively") - def test_decimal_coerce_round_trip_w_cast(self): + def test_decimal_coerce_round_trip_w_cast(self, connection): expr = decimal.Decimal("15.7563") - val = testing.db.scalar(select([cast(expr, Numeric(10, 4))])) + val = connection.scalar(select([cast(expr, Numeric(10, 4))])) eq_(val, expr) @testing.requires.precision_numerics_general diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index 97bdf0ad7..6003a0994 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -22,33 +22,34 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest): @classmethod def insert_data(cls): - config.db.execute( - cls.tables.plain_pk.insert(), - [ - {"id": 1, "data": "d1"}, - {"id": 2, "data": "d2"}, - {"id": 3, "data": "d3"}, - ], - ) - - def test_update(self): + with config.db.connect() as conn: + conn.execute( + cls.tables.plain_pk.insert(), + [ + {"id": 1, "data": "d1"}, + {"id": 2, "data": "d2"}, + {"id": 3, "data": "d3"}, + ], + ) + + def test_update(self, connection): t = self.tables.plain_pk - r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") + r = connection.execute(t.update().where(t.c.id == 2), data="d2_new") assert not r.is_insert assert not r.returns_rows eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), + connection.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (2, "d2_new"), (3, "d3")], ) - def test_delete(self): + def test_delete(self, connection): t = self.tables.plain_pk - r = config.db.execute(t.delete().where(t.c.id == 2)) + r = connection.execute(t.delete().where(t.c.id == 2)) assert not r.is_insert assert not r.returns_rows eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), + connection.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (3, "d3")], ) diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py index 4bc37bf04..ad734a1c3 100644 --- a/lib/sqlalchemy/util/deprecations.py +++ b/lib/sqlalchemy/util/deprecations.py @@ -21,6 +21,7 @@ from .. 
import exc def _warn_with_version(msg, version, type_, stacklevel): warn = type_(msg) warn.deprecated_since = version + warnings.warn(warn, stacklevel=stacklevel + 1) @@ -219,7 +220,7 @@ def _sanitize_restructured_text(text): return name text = re.sub(r":ref:`(.+) <.*>`", lambda m: '"%s"' % m.group(1), text) - return re.sub(r"\:(\w+)\:`~?\.?(.+?)`", repl, text) + return re.sub(r"\:(\w+)\:`~?(?:_\w+)?\.?(.+?)`", repl, text) def _decorate_cls_with_warning( diff --git a/test/sql/test_deprecations.py b/test/sql/test_deprecations.py index 7c115789b..13d4cd154 100644 --- a/test/sql/test_deprecations.py +++ b/test/sql/test_deprecations.py @@ -880,20 +880,24 @@ class KeyTargetingTest(fixtures.TablesTest): @classmethod def insert_data(cls): - cls.tables.keyed1.insert().execute(dict(b="a1", q="c1")) - cls.tables.keyed2.insert().execute(dict(a="a2", b="b2")) - cls.tables.keyed3.insert().execute(dict(a="a3", d="d3")) - cls.tables.keyed4.insert().execute(dict(b="b4", q="q4")) - cls.tables.content.insert().execute(type="t1") - - if testing.requires.schemas.enabled: - cls.tables[ - "%s.wschema" % testing.config.test_schema - ].insert().execute(dict(b="a1", q="c1")) + with testing.db.connect() as conn: + conn.execute(cls.tables.keyed1.insert(), dict(b="a1", q="c1")) + conn.execute(cls.tables.keyed2.insert(), dict(a="a2", b="b2")) + conn.execute(cls.tables.keyed3.insert(), dict(a="a3", d="d3")) + conn.execute(cls.tables.keyed4.insert(), dict(b="b4", q="q4")) + conn.execute(cls.tables.content.insert(), type="t1") + + if testing.requires.schemas.enabled: + conn.execute( + cls.tables[ + "%s.wschema" % testing.config.test_schema + ].insert(), + dict(b="a1", q="c1"), + ) - def test_column_label_overlap_fallback(self): + def test_column_label_overlap_fallback(self, connection): content, bar = self.tables.content, self.tables.bar - row = testing.db.execute( + row = connection.execute( select([content.c.type.label("content_type")]) ).first() @@ -906,7 +910,7 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(sql.column("content_type"), row) - row = testing.db.execute( + row = connection.execute( select([func.now().label("content_type")]) ).first() not_in_(content.c.type, row) @@ -917,7 +921,7 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(sql.column("content_type"), row) - def test_columnclause_schema_column_one(self): + def test_columnclause_schema_column_one(self, connection): keyed2 = self.tables.keyed2 # this is addressed by [ticket:2932] @@ -926,7 +930,7 @@ class KeyTargetingTest(fixtures.TablesTest): # cols, which results in a more liberal comparison scheme a, b = sql.column("a"), sql.column("b") stmt = select([a, b]).select_from(table("keyed2")) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() with testing.expect_deprecated( "Retreiving row values using Column objects " @@ -939,12 +943,12 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(keyed2.c.b, row) - def test_columnclause_schema_column_two(self): + def test_columnclause_schema_column_two(self, connection): keyed2 = self.tables.keyed2 a, b = sql.column("a"), sql.column("b") stmt = select([keyed2.c.a, keyed2.c.b]) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() with testing.expect_deprecated( "Retreiving row values using Column objects " @@ -957,7 +961,7 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(b, row) - def test_columnclause_schema_column_three(self): + def test_columnclause_schema_column_three(self, connection): keyed2 = self.tables.keyed2 # originally 
addressed by [ticket:2932], however liberalized @@ -965,7 +969,7 @@ class KeyTargetingTest(fixtures.TablesTest): a, b = sql.column("a"), sql.column("b") stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() with testing.expect_deprecated( "Retreiving row values using Column objects " @@ -1000,7 +1004,7 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(stmt.c.b, row) - def test_columnclause_schema_column_four(self): + def test_columnclause_schema_column_four(self, connection): keyed2 = self.tables.keyed2 # this is also addressed by [ticket:2932] @@ -1009,7 +1013,7 @@ class KeyTargetingTest(fixtures.TablesTest): stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns( a, b ) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() with testing.expect_deprecated( "Retreiving row values using Column objects " @@ -1034,7 +1038,7 @@ class KeyTargetingTest(fixtures.TablesTest): ): in_(stmt.c.keyed2_b, row) - def test_columnclause_schema_column_five(self): + def test_columnclause_schema_column_five(self, connection): keyed2 = self.tables.keyed2 # this is also addressed by [ticket:2932] @@ -1042,7 +1046,7 @@ class KeyTargetingTest(fixtures.TablesTest): stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns( keyed2_a=CHAR, keyed2_b=CHAR ) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() with testing.expect_deprecated( "Retreiving row values using Column objects " @@ -1115,12 +1119,12 @@ class ResultProxyTest(fixtures.TablesTest): dict(user_id=2, user_name="jack"), ) - def test_column_accessor_textual_select(self): + def test_column_accessor_textual_select(self, connection): users = self.tables.users # this will create column() objects inside # the select(), these need to match on name anyway - r = testing.db.execute( + r = connection.execute( select([column("user_id"), column("user_name")]) .select_from(table("users")) .where(text("user_id=2")) @@ -1147,10 +1151,10 @@ class ResultProxyTest(fixtures.TablesTest): ): eq_(r._mapping[users.c.user_name], "jack") - def test_column_accessor_basic_text(self): + def test_column_accessor_basic_text(self, connection): users = self.tables.users - r = testing.db.execute( + r = connection.execute( text("select * from users where user_id=2") ).first() @@ -1185,13 +1189,13 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r._mapping[users.c.user_name], "jack") @testing.provide_metadata - def test_column_label_overlap_fallback(self): + def test_column_label_overlap_fallback(self, connection): content = Table("content", self.metadata, Column("type", String(30))) bar = Table("bar", self.metadata, Column("content_type", String(30))) self.metadata.create_all(testing.db) - testing.db.execute(content.insert().values(type="t1")) + connection.execute(content.insert().values(type="t1")) - row = testing.db.execute(content.select(use_labels=True)).first() + row = connection.execute(content.select(use_labels=True)).first() in_(content.c.type, row._mapping) not_in_(bar.c.content_type, row) with testing.expect_deprecated( @@ -1200,7 +1204,7 @@ class ResultProxyTest(fixtures.TablesTest): ): in_(sql.column("content_type"), row) - row = testing.db.execute( + row = connection.execute( select([content.c.type.label("content_type")]) ).first() with testing.expect_deprecated( @@ -1217,7 +1221,7 @@ class ResultProxyTest(fixtures.TablesTest): ): in_(sql.column("content_type"), row) - row = 
testing.db.execute( + row = connection.execute( select([func.now().label("content_type")]) ).first() @@ -1243,80 +1247,80 @@ class ResultProxyTest(fixtures.TablesTest): {"user_id": 9, "user_name": "fred"}, ) - for pickle in False, True: - for use_labels in False, True: - result = ( - users.select(use_labels=use_labels) - .order_by(users.c.user_id) - .execute() - .fetchall() - ) + for pickle in False, True: + for use_labels in False, True: + result = conn.execute( + users.select(use_labels=use_labels).order_by( + users.c.user_id + ) + ).fetchall() + + if pickle: + result = util.pickle.loads(util.pickle.dumps(result)) + + if pickle: + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[users.c.user_id], 7) + + result[0]._keymap.pop(users.c.user_id) + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[users.c.user_id], 7) + + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[users.c.user_name], "jack") + + result[0]._keymap.pop(users.c.user_name) + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[users.c.user_name], "jack") + + if not pickle or use_labels: + assert_raises( + exc.NoSuchColumnError, + lambda: result[0][addresses.c.user_id], + ) + + assert_raises( + exc.NoSuchColumnError, + lambda: result[0]._mapping[addresses.c.user_id], + ) + else: + # test with a different table. name resolution is + # causing 'user_id' to match when use_labels wasn't + # used. + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[addresses.c.user_id], 7) + + result[0]._keymap.pop(addresses.c.user_id) + with testing.expect_deprecated( + "Retreiving row values using Column objects " + "from a row that was unpickled" + ): + eq_(result[0]._mapping[addresses.c.user_id], 7) - if pickle: - result = util.pickle.loads(util.pickle.dumps(result)) - - if pickle: - with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[users.c.user_id], 7) - - result[0]._keymap.pop(users.c.user_id) - with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[users.c.user_id], 7) - - with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[users.c.user_name], "jack") - - result[0]._keymap.pop(users.c.user_name) - with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[users.c.user_name], "jack") - - if not pickle or use_labels: assert_raises( exc.NoSuchColumnError, - lambda: result[0][addresses.c.user_id], + lambda: result[0][addresses.c.address_id], ) assert_raises( exc.NoSuchColumnError, - lambda: result[0]._mapping[addresses.c.user_id], + lambda: result[0]._mapping[addresses.c.address_id], ) - else: - # test with a different table. name resolution is - # causing 'user_id' to match when use_labels wasn't used. 
- with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[addresses.c.user_id], 7) - - result[0]._keymap.pop(addresses.c.user_id) - with testing.expect_deprecated( - "Retreiving row values using Column objects " - "from a row that was unpickled" - ): - eq_(result[0]._mapping[addresses.c.user_id], 7) - - assert_raises( - exc.NoSuchColumnError, - lambda: result[0][addresses.c.address_id], - ) - - assert_raises( - exc.NoSuchColumnError, - lambda: result[0]._mapping[addresses.c.address_id], - ) @testing.requires.duplicate_names_in_cursor_description def test_ambiguous_column_case_sensitive(self): @@ -1325,105 +1329,111 @@ class ResultProxyTest(fixtures.TablesTest): ): eng = engines.testing_engine(options=dict(case_sensitive=False)) - row = eng.execute( - select( - [ - literal_column("1").label("SOMECOL"), - literal_column("1").label("SOMECOL"), - ] - ) - ).first() + with eng.connect() as conn: + row = conn.execute( + select( + [ + literal_column("1").label("SOMECOL"), + literal_column("1").label("SOMECOL"), + ] + ) + ).first() - assert_raises_message( - exc.InvalidRequestError, - "Ambiguous column name", - lambda: row._mapping["somecol"], - ) + assert_raises_message( + exc.InvalidRequestError, + "Ambiguous column name", + lambda: row._mapping["somecol"], + ) - def test_row_getitem_string(self): - with testing.db.connect() as conn: - col = literal_column("1").label("foo") - row = conn.execute(select([col])).first() + def test_row_getitem_string(self, connection): + col = literal_column("1").label("foo") + row = connection.execute(select([col])).first() - with testing.expect_deprecated( - "Using non-integer/slice indices on Row is deprecated " - "and will be removed in version 2.0;" - ): - eq_(row["foo"], 1) + with testing.expect_deprecated( + "Using non-integer/slice indices on Row is deprecated " + "and will be removed in version 2.0;" + ): + eq_(row["foo"], 1) - eq_(row._mapping["foo"], 1) + eq_(row._mapping["foo"], 1) - def test_row_getitem_column(self): - with testing.db.connect() as conn: - col = literal_column("1").label("foo") - row = conn.execute(select([col])).first() + def test_row_getitem_column(self, connection): + col = literal_column("1").label("foo") + row = connection.execute(select([col])).first() - with testing.expect_deprecated( - "Using non-integer/slice indices on Row is deprecated " - "and will be removed in version 2.0;" - ): - eq_(row[col], 1) + with testing.expect_deprecated( + "Using non-integer/slice indices on Row is deprecated " + "and will be removed in version 2.0;" + ): + eq_(row[col], 1) - eq_(row._mapping[col], 1) + eq_(row._mapping[col], 1) def test_row_case_insensitive(self): with testing.expect_deprecated( "The create_engine.case_sensitive parameter is deprecated" ): - ins_db = engines.testing_engine(options={"case_sensitive": False}) - row = ins_db.execute( - select( - [ - literal_column("1").label("case_insensitive"), - literal_column("2").label("CaseSensitive"), - ] - ) - ).first() + with engines.testing_engine( + options={"case_sensitive": False} + ).connect() as ins_conn: + row = ins_conn.execute( + select( + [ + literal_column("1").label("case_insensitive"), + literal_column("2").label("CaseSensitive"), + ] + ) + ).first() - eq_(list(row._mapping.keys()), ["case_insensitive", "CaseSensitive"]) + eq_( + list(row._mapping.keys()), + ["case_insensitive", "CaseSensitive"], + ) - in_("case_insensitive", row._keymap) - in_("CaseSensitive", row._keymap) - 
in_("casesensitive", row._keymap) + in_("case_insensitive", row._keymap) + in_("CaseSensitive", row._keymap) + in_("casesensitive", row._keymap) - eq_(row._mapping["case_insensitive"], 1) - eq_(row._mapping["CaseSensitive"], 2) - eq_(row._mapping["Case_insensitive"], 1) - eq_(row._mapping["casesensitive"], 2) + eq_(row._mapping["case_insensitive"], 1) + eq_(row._mapping["CaseSensitive"], 2) + eq_(row._mapping["Case_insensitive"], 1) + eq_(row._mapping["casesensitive"], 2) def test_row_case_insensitive_unoptimized(self): with testing.expect_deprecated( "The create_engine.case_sensitive parameter is deprecated" ): - ins_db = engines.testing_engine(options={"case_sensitive": False}) - row = ins_db.execute( - select( - [ - literal_column("1").label("case_insensitive"), - literal_column("2").label("CaseSensitive"), - text("3 AS screw_up_the_cols"), - ] - ) - ).first() + with engines.testing_engine( + options={"case_sensitive": False} + ).connect() as ins_conn: + row = ins_conn.execute( + select( + [ + literal_column("1").label("case_insensitive"), + literal_column("2").label("CaseSensitive"), + text("3 AS screw_up_the_cols"), + ] + ) + ).first() - eq_( - list(row._mapping.keys()), - ["case_insensitive", "CaseSensitive", "screw_up_the_cols"], - ) + eq_( + list(row._mapping.keys()), + ["case_insensitive", "CaseSensitive", "screw_up_the_cols"], + ) - in_("case_insensitive", row._keymap) - in_("CaseSensitive", row._keymap) - in_("casesensitive", row._keymap) + in_("case_insensitive", row._keymap) + in_("CaseSensitive", row._keymap) + in_("casesensitive", row._keymap) - eq_(row._mapping["case_insensitive"], 1) - eq_(row._mapping["CaseSensitive"], 2) - eq_(row._mapping["screw_up_the_cols"], 3) - eq_(row._mapping["Case_insensitive"], 1) - eq_(row._mapping["casesensitive"], 2) - eq_(row._mapping["screw_UP_the_cols"], 3) + eq_(row._mapping["case_insensitive"], 1) + eq_(row._mapping["CaseSensitive"], 2) + eq_(row._mapping["screw_up_the_cols"], 3) + eq_(row._mapping["Case_insensitive"], 1) + eq_(row._mapping["casesensitive"], 2) + eq_(row._mapping["screw_UP_the_cols"], 3) - def test_row_keys_deprecated(self): - r = testing.db.execute( + def test_row_keys_deprecated(self, connection): + r = connection.execute( text("select * from users where user_id=2") ).first() @@ -1432,8 +1442,8 @@ class ResultProxyTest(fixtures.TablesTest): ): eq_(r.keys(), ["user_id", "user_name"]) - def test_row_contains_key_deprecated(self): - r = testing.db.execute( + def test_row_contains_key_deprecated(self, connection): + r = connection.execute( text("select * from users where user_id=2") ).first() @@ -1472,11 +1482,13 @@ class PositionalTextTest(fixtures.TablesTest): @classmethod def insert_data(cls): - cls.tables.text1.insert().execute( - [dict(a="a1", b="b1", c="c1", d="d1")] - ) + with testing.db.connect() as conn: + conn.execute( + cls.tables.text1.insert(), + [dict(a="a1", b="b1", c="c1", d="d1")], + ) - def test_anon_aliased_overlapping(self): + def test_anon_aliased_overlapping(self, connection): text1 = self.tables.text1 c1 = text1.c.a.label(None) @@ -1485,7 +1497,7 @@ class PositionalTextTest(fixtures.TablesTest): c4 = text1.c.a.label(None) stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() with testing.expect_deprecated( @@ -1494,7 +1506,7 @@ class PositionalTextTest(fixtures.TablesTest): ): eq_(row._mapping[text1.c.a], "a1") - def test_anon_aliased_unique(self): + def test_anon_aliased_unique(self, 
connection): text1 = self.tables.text1 c1 = text1.c.a.label(None) @@ -1503,7 +1515,7 @@ class PositionalTextTest(fixtures.TablesTest): c4 = text1.alias().c.d.label(None) stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "a1") diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py index afe5bdb59..fac369bb4 100644 --- a/test/sql/test_metadata.py +++ b/test/sql/test_metadata.py @@ -32,6 +32,7 @@ from sqlalchemy import UniqueConstraint from sqlalchemy.engine import default from sqlalchemy.schema import AddConstraint from sqlalchemy.schema import CreateIndex +from sqlalchemy.schema import DefaultClause from sqlalchemy.schema import DropIndex from sqlalchemy.sql import naming from sqlalchemy.sql import operators @@ -628,6 +629,22 @@ class MetaDataTest(fixtures.TestBase, ComparesTables): (Sequence("my_seq", start=5), "Sequence('my_seq', start=5)"), (Column("foo", Integer), "Column('foo', Integer(), table=None)"), ( + Column( + "foo", + Integer, + primary_key=True, + nullable=False, + onupdate=1, + default=42, + server_default="42", + comment="foo", + ), + "Column('foo', Integer(), table=None, primary_key=True, " + "nullable=False, onupdate=%s, default=%s, server_default=%s, " + "comment='foo')" + % (ColumnDefault(1), ColumnDefault(42), DefaultClause("42"),), + ), + ( Table("bar", MetaData(), Column("x", String)), "Table('bar', MetaData(bind=None), " "Column('x', String(), table=<bar>), schema=None)", diff --git a/test/sql/test_query.py b/test/sql/test_query.py index 83c5da342..bca7c262b 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -84,9 +84,10 @@ class QueryTest(fixtures.TestBase): @engines.close_first def teardown(self): - addresses.delete().execute() - users.delete().execute() - users2.delete().execute() + with testing.db.connect() as conn: + conn.execute(addresses.delete()) + conn.execute(users.delete()) + conn.execute(users2.delete()) @classmethod def teardown_class(cls): @@ -95,7 +96,7 @@ class QueryTest(fixtures.TestBase): @testing.fails_on( "firebird", "kinterbasdb doesn't send full type information" ) - def test_order_by_label(self): + def test_order_by_label(self, connection): """test that a label within an ORDER BY works on each backend. 
This test should be modified to support [ticket:1068] when that ticket @@ -104,7 +105,8 @@ class QueryTest(fixtures.TestBase): """ - users.insert().execute( + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, @@ -112,24 +114,31 @@ class QueryTest(fixtures.TestBase): concat = ("test: " + users.c.user_name).label("thedata") eq_( - select([concat]).order_by("thedata").execute().fetchall(), + connection.execute( + select([concat]).order_by("thedata") + ).fetchall(), [("test: ed",), ("test: fred",), ("test: jack",)], ) eq_( - select([concat]).order_by("thedata").execute().fetchall(), + connection.execute( + select([concat]).order_by("thedata") + ).fetchall(), [("test: ed",), ("test: fred",), ("test: jack",)], ) concat = ("test: " + users.c.user_name).label("thedata") eq_( - select([concat]).order_by(desc("thedata")).execute().fetchall(), + connection.execute( + select([concat]).order_by(desc("thedata")) + ).fetchall(), [("test: jack",), ("test: fred",), ("test: ed",)], ) @testing.requires.order_by_label_with_expression - def test_order_by_label_compound(self): - users.insert().execute( + def test_order_by_label_compound(self, connection): + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, @@ -137,27 +146,26 @@ class QueryTest(fixtures.TestBase): concat = ("test: " + users.c.user_name).label("thedata") eq_( - select([concat]) - .order_by(literal_column("thedata") + "x") - .execute() - .fetchall(), + connection.execute( + select([concat]).order_by(literal_column("thedata") + "x") + ).fetchall(), [("test: ed",), ("test: fred",), ("test: jack",)], ) @testing.requires.boolean_col_expressions - def test_or_and_as_columns(self): + def test_or_and_as_columns(self, connection): true, false = literal(True), literal(False) - eq_(testing.db.execute(select([and_(true, false)])).scalar(), False) - eq_(testing.db.execute(select([and_(true, true)])).scalar(), True) - eq_(testing.db.execute(select([or_(true, false)])).scalar(), True) - eq_(testing.db.execute(select([or_(false, false)])).scalar(), False) + eq_(connection.execute(select([and_(true, false)])).scalar(), False) + eq_(connection.execute(select([and_(true, true)])).scalar(), True) + eq_(connection.execute(select([or_(true, false)])).scalar(), True) + eq_(connection.execute(select([or_(false, false)])).scalar(), False) eq_( - testing.db.execute(select([not_(or_(false, false))])).scalar(), + connection.execute(select([not_(or_(false, false))])).scalar(), True, ) - row = testing.db.execute( + row = connection.execute( select( [or_(false, false).label("x"), and_(true, false).label("y")] ) @@ -165,14 +173,15 @@ class QueryTest(fixtures.TestBase): assert row.x == False # noqa assert row.y == False # noqa - row = testing.db.execute( + row = connection.execute( select([or_(true, false).label("x"), and_(true, false).label("y")]) ).first() assert row.x == True # noqa assert row.y == False # noqa - def test_like_ops(self): - users.insert().execute( + def test_like_ops(self, connection): + connection.execute( + users.insert(), {"user_id": 1, "user_name": "apples"}, {"user_id": 2, "user_name": "oranges"}, {"user_id": 3, "user_name": "bananas"}, @@ -206,11 +215,11 @@ class QueryTest(fixtures.TestBase): [(5,)], ), ): - eq_(expr.execute().fetchall(), result) + eq_(connection.execute(expr).fetchall(), result) @testing.requires.mod_operator_as_percent_sign 
@testing.emits_warning(".*now automatically escapes.*") - def test_percents_in_text(self): + def test_percents_in_text(self, connection): for expr, result in ( (text("select 6 % 10"), 6), (text("select 17 % 10"), 7), @@ -219,10 +228,11 @@ class QueryTest(fixtures.TestBase): (text("select '%%%'"), "%%%"), (text("select 'hello % world'"), "hello % world"), ): - eq_(testing.db.scalar(expr), result) + eq_(connection.scalar(expr), result) - def test_ilike(self): - users.insert().execute( + def test_ilike(self, connection): + connection.execute( + users.insert(), {"user_id": 1, "user_name": "one"}, {"user_id": 2, "user_name": "TwO"}, {"user_id": 3, "user_name": "ONE"}, @@ -230,62 +240,62 @@ class QueryTest(fixtures.TestBase): ) eq_( - select([users.c.user_id]) - .where(users.c.user_name.ilike("one")) - .execute() - .fetchall(), + connection.execute( + select([users.c.user_id]).where(users.c.user_name.ilike("one")) + ).fetchall(), [(1,), (3,), (4,)], ) eq_( - select([users.c.user_id]) - .where(users.c.user_name.ilike("TWO")) - .execute() - .fetchall(), + connection.execute( + select([users.c.user_id]).where(users.c.user_name.ilike("TWO")) + ).fetchall(), [(2,)], ) if testing.against("postgresql"): eq_( - select([users.c.user_id]) - .where(users.c.user_name.like("one")) - .execute() - .fetchall(), + connection.execute( + select([users.c.user_id]).where( + users.c.user_name.like("one") + ) + ).fetchall(), [(1,)], ) eq_( - select([users.c.user_id]) - .where(users.c.user_name.like("TWO")) - .execute() - .fetchall(), + connection.execute( + select([users.c.user_id]).where( + users.c.user_name.like("TWO") + ) + ).fetchall(), [], ) - def test_compiled_execute(self): - users.insert().execute(user_id=7, user_name="jack") + def test_compiled_execute(self, connection): + connection.execute(users.insert(), user_id=7, user_name="jack") s = select([users], users.c.user_id == bindparam("id")).compile() - c = testing.db.connect() - eq_(c.execute(s, id=7).first()._mapping["user_id"], 7) + eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7) - def test_compiled_insert_execute(self): - users.insert().compile().execute(user_id=7, user_name="jack") + def test_compiled_insert_execute(self, connection): + connection.execute( + users.insert().compile(), user_id=7, user_name="jack" + ) s = select([users], users.c.user_id == bindparam("id")).compile() - c = testing.db.connect() - eq_(c.execute(s, id=7).first()._mapping["user_id"], 7) + eq_(connection.execute(s, id=7).first()._mapping["user_id"], 7) - def test_repeated_bindparams(self): + def test_repeated_bindparams(self, connection): """Tests that a BindParam can be used more than once. This should be run for DB-APIs with both positional and named paramstyles. 
""" - users.insert().execute(user_id=7, user_name="jack") - users.insert().execute(user_id=8, user_name="fred") + connection.execute(users.insert(), user_id=7, user_name="jack") + connection.execute(users.insert(), user_id=8, user_name="fred") u = bindparam("userid") s = users.select(and_(users.c.user_name == u, users.c.user_name == u)) - r = s.execute(userid="fred").fetchall() + r = connection.execute(s, userid="fred").fetchall() assert len(r) == 1 def test_bindparam_detection(self): @@ -322,7 +332,7 @@ class QueryTest(fixtures.TestBase): a_eq(prep(r".\:that$ :other."), ".:that$ ?.") @testing.requires.standalone_binds - def test_select_from_bindparam(self): + def test_select_from_bindparam(self, connection): """Test result row processing when selecting from a plain bind param.""" @@ -336,28 +346,28 @@ class QueryTest(fixtures.TestBase): return "INT_%d" % value eq_( - testing.db.scalar(select([cast("INT_5", type_=MyInteger)])), + connection.scalar(select([cast("INT_5", type_=MyInteger)])), "INT_5", ) eq_( - testing.db.scalar( + connection.scalar( select([cast("INT_5", type_=MyInteger).label("foo")]) ), "INT_5", ) - def test_order_by(self): + def test_order_by(self, connection): """Exercises ORDER BY clause generation. Tests simple, compound, aliased and DESC clauses. """ - users.insert().execute(user_id=1, user_name="c") - users.insert().execute(user_id=2, user_name="b") - users.insert().execute(user_id=3, user_name="a") + connection.execute(users.insert(), user_id=1, user_name="c") + connection.execute(users.insert(), user_id=2, user_name="b") + connection.execute(users.insert(), user_id=3, user_name="a") def a_eq(executable, wanted): - got = list(executable.execute()) + got = list(connection.execute(executable)) eq_(got, wanted) for labels in False, True: @@ -444,18 +454,18 @@ class QueryTest(fixtures.TestBase): ) @testing.requires.nullsordering - def test_order_by_nulls(self): + def test_order_by_nulls(self, connection): """Exercises ORDER BY clause generation. Tests simple, compound, aliased and DESC clauses. 
""" - users.insert().execute(user_id=1) - users.insert().execute(user_id=2, user_name="b") - users.insert().execute(user_id=3, user_name="a") + connection.execute(users.insert(), user_id=1) + connection.execute(users.insert(), user_id=2, user_name="b") + connection.execute(users.insert(), user_id=3, user_name="a") def a_eq(executable, wanted): - got = list(executable.execute()) + got = list(connection.execute(executable)) eq_(got, wanted) for labels in False, True: @@ -538,33 +548,33 @@ class QueryTest(fixtures.TestBase): [(3, "a"), (2, "b"), (1, None)], ) - def test_in_filtering(self): + def test_in_filtering(self, connection): """test the behavior of the in_() function.""" - users.insert().execute(user_id=7, user_name="jack") - users.insert().execute(user_id=8, user_name="fred") - users.insert().execute(user_id=9, user_name=None) + connection.execute(users.insert(), user_id=7, user_name="jack") + connection.execute(users.insert(), user_id=8, user_name="fred") + connection.execute(users.insert(), user_id=9, user_name=None) s = users.select(users.c.user_name.in_([])) - r = s.execute().fetchall() + r = connection.execute(s).fetchall() # No username is in empty set assert len(r) == 0 s = users.select(not_(users.c.user_name.in_([]))) - r = s.execute().fetchall() + r = connection.execute(s).fetchall() assert len(r) == 3 s = users.select(users.c.user_name.in_(["jack", "fred"])) - r = s.execute().fetchall() + r = connection.execute(s).fetchall() assert len(r) == 2 s = users.select(not_(users.c.user_name.in_(["jack", "fred"]))) - r = s.execute().fetchall() + r = connection.execute(s).fetchall() # Null values are not outside any set assert len(r) == 0 - def test_expanding_in(self): - testing.db.execute( + def test_expanding_in(self, connection): + connection.execute( users.insert(), [ dict(user_id=7, user_name="jack"), @@ -573,40 +583,37 @@ class QueryTest(fixtures.TestBase): ], ) - with testing.db.connect() as conn: - stmt = ( - select([users]) - .where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - ) - .order_by(users.c.user_id) - ) + stmt = ( + select([users]) + .where(users.c.user_name.in_(bindparam("uname", expanding=True))) + .order_by(users.c.user_id) + ) - eq_( - conn.execute(stmt, {"uname": ["jack"]}).fetchall(), - [(7, "jack")], - ) + eq_( + connection.execute(stmt, {"uname": ["jack"]}).fetchall(), + [(7, "jack")], + ) - eq_( - conn.execute(stmt, {"uname": ["jack", "fred"]}).fetchall(), - [(7, "jack"), (8, "fred")], - ) + eq_( + connection.execute(stmt, {"uname": ["jack", "fred"]}).fetchall(), + [(7, "jack"), (8, "fred")], + ) - eq_(conn.execute(stmt, {"uname": []}).fetchall(), []) + eq_(connection.execute(stmt, {"uname": []}).fetchall(), []) - assert_raises_message( - exc.StatementError, - "'expanding' parameters can't be used with executemany()", - conn.execute, - users.update().where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - ), - [{"uname": ["fred"]}, {"uname": ["ed"]}], - ) + assert_raises_message( + exc.StatementError, + "'expanding' parameters can't be used with executemany()", + connection.execute, + users.update().where( + users.c.user_name.in_(bindparam("uname", expanding=True)) + ), + [{"uname": ["fred"]}, {"uname": ["ed"]}], + ) @testing.requires.no_quoting_special_bind_names - def test_expanding_in_special_chars(self): - testing.db.execute( + def test_expanding_in_special_chars(self, connection): + connection.execute( users.insert(), [ dict(user_id=7, user_name="jack"), @@ -614,39 +621,36 @@ class QueryTest(fixtures.TestBase): ], ) - with 
testing.db.connect() as conn: - stmt = ( - select([users]) - .where(users.c.user_name.in_(bindparam("u35", expanding=True))) - .where(users.c.user_id == bindparam("u46")) - .order_by(users.c.user_id) - ) + stmt = ( + select([users]) + .where(users.c.user_name.in_(bindparam("u35", expanding=True))) + .where(users.c.user_id == bindparam("u46")) + .order_by(users.c.user_id) + ) - eq_( - conn.execute( - stmt, {"u35": ["jack", "fred"], "u46": 7} - ).fetchall(), - [(7, "jack")], - ) + eq_( + connection.execute( + stmt, {"u35": ["jack", "fred"], "u46": 7} + ).fetchall(), + [(7, "jack")], + ) - stmt = ( - select([users]) - .where( - users.c.user_name.in_(bindparam("u.35", expanding=True)) - ) - .where(users.c.user_id == bindparam("u.46")) - .order_by(users.c.user_id) - ) + stmt = ( + select([users]) + .where(users.c.user_name.in_(bindparam("u.35", expanding=True))) + .where(users.c.user_id == bindparam("u.46")) + .order_by(users.c.user_id) + ) - eq_( - conn.execute( - stmt, {"u.35": ["jack", "fred"], "u.46": 7} - ).fetchall(), - [(7, "jack")], - ) + eq_( + connection.execute( + stmt, {"u.35": ["jack", "fred"], "u.46": 7} + ).fetchall(), + [(7, "jack")], + ) - def test_expanding_in_multiple(self): - testing.db.execute( + def test_expanding_in_multiple(self, connection): + connection.execute( users.insert(), [ dict(user_id=7, user_name="jack"), @@ -655,27 +659,22 @@ class QueryTest(fixtures.TestBase): ], ) - with testing.db.connect() as conn: - stmt = ( - select([users]) - .where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - ) - .where( - users.c.user_id.in_(bindparam("userid", expanding=True)) - ) - .order_by(users.c.user_id) - ) + stmt = ( + select([users]) + .where(users.c.user_name.in_(bindparam("uname", expanding=True))) + .where(users.c.user_id.in_(bindparam("userid", expanding=True))) + .order_by(users.c.user_id) + ) - eq_( - conn.execute( - stmt, {"uname": ["jack", "fred", "ed"], "userid": [8, 9]} - ).fetchall(), - [(8, "fred"), (9, "ed")], - ) + eq_( + connection.execute( + stmt, {"uname": ["jack", "fred", "ed"], "userid": [8, 9]} + ).fetchall(), + [(8, "fred"), (9, "ed")], + ) - def test_expanding_in_repeated(self): - testing.db.execute( + def test_expanding_in_repeated(self, connection): + connection.execute( users.insert(), [ dict(user_id=7, user_name="jack"), @@ -684,43 +683,38 @@ class QueryTest(fixtures.TestBase): ], ) - with testing.db.connect() as conn: - stmt = ( - select([users]) - .where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - | users.c.user_name.in_( - bindparam("uname2", expanding=True) - ) - ) - .where(users.c.user_id == 8) - ) - stmt = stmt.union( - select([users]) - .where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - | users.c.user_name.in_( - bindparam("uname2", expanding=True) - ) - ) - .where(users.c.user_id == 9) - ).order_by("user_id") - - eq_( - conn.execute( - stmt, - { - "uname": ["jack", "fred"], - "uname2": ["ed"], - "userid": [8, 9], - }, - ).fetchall(), - [(8, "fred"), (9, "ed")], + stmt = ( + select([users]) + .where( + users.c.user_name.in_(bindparam("uname", expanding=True)) + | users.c.user_name.in_(bindparam("uname2", expanding=True)) + ) + .where(users.c.user_id == 8) + ) + stmt = stmt.union( + select([users]) + .where( + users.c.user_name.in_(bindparam("uname", expanding=True)) + | users.c.user_name.in_(bindparam("uname2", expanding=True)) ) + .where(users.c.user_id == 9) + ).order_by("user_id") + + eq_( + connection.execute( + stmt, + { + "uname": ["jack", "fred"], + "uname2": ["ed"], + "userid": 
[8, 9], + }, + ).fetchall(), + [(8, "fred"), (9, "ed")], + ) @testing.requires.tuple_in - def test_expanding_in_composite(self): - testing.db.execute( + def test_expanding_in_composite(self, connection): + connection.execute( users.insert(), [ dict(user_id=7, user_name="jack"), @@ -729,30 +723,29 @@ class QueryTest(fixtures.TestBase): ], ) - with testing.db.connect() as conn: - stmt = ( - select([users]) - .where( - tuple_(users.c.user_id, users.c.user_name).in_( - bindparam("uname", expanding=True) - ) + stmt = ( + select([users]) + .where( + tuple_(users.c.user_id, users.c.user_name).in_( + bindparam("uname", expanding=True) ) - .order_by(users.c.user_id) ) + .order_by(users.c.user_id) + ) - eq_( - conn.execute(stmt, {"uname": [(7, "jack")]}).fetchall(), - [(7, "jack")], - ) + eq_( + connection.execute(stmt, {"uname": [(7, "jack")]}).fetchall(), + [(7, "jack")], + ) - eq_( - conn.execute( - stmt, {"uname": [(7, "jack"), (8, "fred")]} - ).fetchall(), - [(7, "jack"), (8, "fred")], - ) + eq_( + connection.execute( + stmt, {"uname": [(7, "jack"), (8, "fred")]} + ).fetchall(), + [(7, "jack"), (8, "fred")], + ) - def test_expanding_in_dont_alter_compiled(self): + def test_expanding_in_dont_alter_compiled(self, connection): """test for issue #5048 """ class NameWithProcess(TypeDecorator): @@ -768,40 +761,37 @@ class QueryTest(fixtures.TestBase): Column("user_name", NameWithProcess()), ) - with testing.db.connect() as conn: - conn.execute( - users.insert(), - [ - dict(user_id=7, user_name="AB jack"), - dict(user_id=8, user_name="BE fred"), - dict(user_id=9, user_name="GP ed"), - ], - ) + connection.execute( + users.insert(), + [ + dict(user_id=7, user_name="AB jack"), + dict(user_id=8, user_name="BE fred"), + dict(user_id=9, user_name="GP ed"), + ], + ) - stmt = ( - select([users]) - .where( - users.c.user_name.in_(bindparam("uname", expanding=True)) - ) - .order_by(users.c.user_id) - ) + stmt = ( + select([users]) + .where(users.c.user_name.in_(bindparam("uname", expanding=True))) + .order_by(users.c.user_id) + ) - compiled = stmt.compile(testing.db) - eq_(len(compiled._bind_processors), 1) + compiled = stmt.compile(testing.db) + eq_(len(compiled._bind_processors), 1) - eq_( - conn.execute( - compiled, {"uname": ["HJ jack", "RR fred"]} - ).fetchall(), - [(7, "jack"), (8, "fred")], - ) + eq_( + connection.execute( + compiled, {"uname": ["HJ jack", "RR fred"]} + ).fetchall(), + [(7, "jack"), (8, "fred")], + ) - eq_(len(compiled._bind_processors), 1) + eq_(len(compiled._bind_processors), 1) @testing.fails_on("firebird", "uses sql-92 rules") @testing.fails_on("sybase", "uses sql-92 rules") @testing.skip_if(["mssql"]) - def test_bind_in(self): + def test_bind_in(self, connection): """test calling IN against a bind parameter. 
this isn't allowed on several platforms since we @@ -809,56 +799,55 @@ class QueryTest(fixtures.TestBase): """ - users.insert().execute(user_id=7, user_name="jack") - users.insert().execute(user_id=8, user_name="fred") - users.insert().execute(user_id=9, user_name=None) + connection.execute(users.insert(), user_id=7, user_name="jack") + connection.execute(users.insert(), user_id=8, user_name="fred") + connection.execute(users.insert(), user_id=9, user_name=None) u = bindparam("search_key", type_=String) s = users.select(not_(u.in_([]))) - r = s.execute(search_key="john").fetchall() + r = connection.execute(s, search_key="john").fetchall() assert len(r) == 3 - r = s.execute(search_key=None).fetchall() + r = connection.execute(s, search_key=None).fetchall() assert len(r) == 3 - def test_literal_in(self): + def test_literal_in(self, connection): """similar to test_bind_in but use a bind with a value.""" - users.insert().execute(user_id=7, user_name="jack") - users.insert().execute(user_id=8, user_name="fred") - users.insert().execute(user_id=9, user_name=None) + connection.execute(users.insert(), user_id=7, user_name="jack") + connection.execute(users.insert(), user_id=8, user_name="fred") + connection.execute(users.insert(), user_id=9, user_name=None) s = users.select(not_(literal("john").in_([]))) - r = s.execute().fetchall() + r = connection.execute(s).fetchall() assert len(r) == 3 @testing.requires.boolean_col_expressions - def test_empty_in_filtering_static(self): + def test_empty_in_filtering_static(self, connection): """test the behavior of the in_() function when comparing against an empty collection, specifically that a proper boolean value is generated. """ - with testing.db.connect() as conn: - conn.execute( - users.insert(), - [ - {"user_id": 7, "user_name": "jack"}, - {"user_id": 8, "user_name": "ed"}, - {"user_id": 9, "user_name": None}, - ], - ) + connection.execute( + users.insert(), + [ + {"user_id": 7, "user_name": "jack"}, + {"user_id": 8, "user_name": "ed"}, + {"user_id": 9, "user_name": None}, + ], + ) - s = users.select(users.c.user_name.in_([]) == True) # noqa - r = conn.execute(s).fetchall() - assert len(r) == 0 - s = users.select(users.c.user_name.in_([]) == False) # noqa - r = conn.execute(s).fetchall() - assert len(r) == 3 - s = users.select(users.c.user_name.in_([]) == None) # noqa - r = conn.execute(s).fetchall() - assert len(r) == 0 + s = users.select(users.c.user_name.in_([]) == True) # noqa + r = connection.execute(s).fetchall() + assert len(r) == 0 + s = users.select(users.c.user_name.in_([]) == False) # noqa + r = connection.execute(s).fetchall() + assert len(r) == 3 + s = users.select(users.c.user_name.in_([]) == None) # noqa + r = connection.execute(s).fetchall() + assert len(r) == 0 class RequiredBindTest(fixtures.TablesTest): @@ -951,98 +940,102 @@ class LimitTest(fixtures.TestBase): ) metadata.create_all() - users.insert().execute(user_id=1, user_name="john") - addresses.insert().execute(address_id=1, user_id=1, address="addr1") - users.insert().execute(user_id=2, user_name="jack") - addresses.insert().execute(address_id=2, user_id=2, address="addr1") - users.insert().execute(user_id=3, user_name="ed") - addresses.insert().execute(address_id=3, user_id=3, address="addr2") - users.insert().execute(user_id=4, user_name="wendy") - addresses.insert().execute(address_id=4, user_id=4, address="addr3") - users.insert().execute(user_id=5, user_name="laura") - addresses.insert().execute(address_id=5, user_id=5, address="addr4") - users.insert().execute(user_id=6, 
user_name="ralph") - addresses.insert().execute(address_id=6, user_id=6, address="addr5") - users.insert().execute(user_id=7, user_name="fido") - addresses.insert().execute(address_id=7, user_id=7, address="addr5") + with testing.db.connect() as conn: + conn.execute(users.insert(), user_id=1, user_name="john") + conn.execute( + addresses.insert(), address_id=1, user_id=1, address="addr1" + ) + conn.execute(users.insert(), user_id=2, user_name="jack") + conn.execute( + addresses.insert(), address_id=2, user_id=2, address="addr1" + ) + conn.execute(users.insert(), user_id=3, user_name="ed") + conn.execute( + addresses.insert(), address_id=3, user_id=3, address="addr2" + ) + conn.execute(users.insert(), user_id=4, user_name="wendy") + conn.execute( + addresses.insert(), address_id=4, user_id=4, address="addr3" + ) + conn.execute(users.insert(), user_id=5, user_name="laura") + conn.execute( + addresses.insert(), address_id=5, user_id=5, address="addr4" + ) + conn.execute(users.insert(), user_id=6, user_name="ralph") + conn.execute( + addresses.insert(), address_id=6, user_id=6, address="addr5" + ) + conn.execute(users.insert(), user_id=7, user_name="fido") + conn.execute( + addresses.insert(), address_id=7, user_id=7, address="addr5" + ) @classmethod def teardown_class(cls): metadata.drop_all() - def test_select_limit(self): - r = ( + def test_select_limit(self, connection): + r = connection.execute( users.select(limit=3, order_by=[users.c.user_id]) - .execute() - .fetchall() - ) + ).fetchall() self.assert_(r == [(1, "john"), (2, "jack"), (3, "ed")], repr(r)) @testing.requires.offset - def test_select_limit_offset(self): + def test_select_limit_offset(self, connection): """Test the interaction between limit and offset""" - r = ( + r = connection.execute( users.select(limit=3, offset=2, order_by=[users.c.user_id]) - .execute() - .fetchall() - ) + ).fetchall() self.assert_(r == [(3, "ed"), (4, "wendy"), (5, "laura")]) - r = ( + r = connection.execute( users.select(offset=5, order_by=[users.c.user_id]) - .execute() - .fetchall() - ) + ).fetchall() self.assert_(r == [(6, "ralph"), (7, "fido")]) - def test_select_distinct_limit(self): + def test_select_distinct_limit(self, connection): """Test the interaction between limit and distinct""" r = sorted( [ x[0] - for x in select([addresses.c.address]) - .distinct() - .limit(3) - .order_by(addresses.c.address) - .execute() - .fetchall() + for x in connection.execute( + select([addresses.c.address]).distinct().limit(3) + ) ] ) self.assert_(len(r) == 3, repr(r)) self.assert_(r[0] != r[1] and r[1] != r[2], repr(r)) @testing.requires.offset - def test_select_distinct_offset(self): + def test_select_distinct_offset(self, connection): """Test the interaction between distinct and offset""" r = sorted( [ x[0] - for x in select([addresses.c.address]) - .distinct() - .offset(1) - .order_by(addresses.c.address) - .execute() - .fetchall() + for x in connection.execute( + select([addresses.c.address]) + .distinct() + .offset(1) + .order_by(addresses.c.address) + ).fetchall() ] ) eq_(len(r), 4) self.assert_(r[0] != r[1] and r[1] != r[2] and r[2] != [3], repr(r)) @testing.requires.offset - def test_select_distinct_limit_offset(self): + def test_select_distinct_limit_offset(self, connection): """Test the interaction between limit and limit/offset""" - r = ( + r = connection.execute( select([addresses.c.address]) .order_by(addresses.c.address) .distinct() .offset(2) .limit(3) - .execute() - .fetchall() - ) + ).fetchall() self.assert_(len(r) == 3, repr(r)) 
self.assert_(r[0] != r[1] and r[1] != r[2], repr(r)) @@ -1099,27 +1092,31 @@ class CompoundTest(fixtures.TestBase): ) metadata.create_all() - t1.insert().execute( - [ - dict(col2="t1col2r1", col3="aaa", col4="aaa"), - dict(col2="t1col2r2", col3="bbb", col4="bbb"), - dict(col2="t1col2r3", col3="ccc", col4="ccc"), - ] - ) - t2.insert().execute( - [ - dict(col2="t2col2r1", col3="aaa", col4="bbb"), - dict(col2="t2col2r2", col3="bbb", col4="ccc"), - dict(col2="t2col2r3", col3="ccc", col4="aaa"), - ] - ) - t3.insert().execute( - [ - dict(col2="t3col2r1", col3="aaa", col4="ccc"), - dict(col2="t3col2r2", col3="bbb", col4="aaa"), - dict(col2="t3col2r3", col3="ccc", col4="bbb"), - ] - ) + with testing.db.connect() as conn: + conn.execute( + t1.insert(), + [ + dict(col2="t1col2r1", col3="aaa", col4="aaa"), + dict(col2="t1col2r2", col3="bbb", col4="bbb"), + dict(col2="t1col2r3", col3="ccc", col4="ccc"), + ], + ) + conn.execute( + t2.insert(), + [ + dict(col2="t2col2r1", col3="aaa", col4="bbb"), + dict(col2="t2col2r2", col3="bbb", col4="ccc"), + dict(col2="t2col2r3", col3="ccc", col4="aaa"), + ], + ) + conn.execute( + t3.insert(), + [ + dict(col2="t3col2r1", col3="aaa", col4="ccc"), + dict(col2="t3col2r2", col3="bbb", col4="aaa"), + dict(col2="t3col2r3", col3="ccc", col4="bbb"), + ], + ) @engines.close_first def teardown(self): @@ -1133,7 +1130,7 @@ class CompoundTest(fixtures.TestBase): return sorted([tuple(row) for row in executed.fetchall()]) @testing.requires.subqueries - def test_union(self): + def test_union(self, connection): (s1, s2) = ( select( [t1.c.col3.label("col3"), t1.c.col4.label("col4")], @@ -1152,14 +1149,16 @@ class CompoundTest(fixtures.TestBase): ("bbb", "ccc"), ("ccc", "aaa"), ] - found1 = self._fetchall_sorted(u.execute()) + found1 = self._fetchall_sorted(connection.execute(u)) eq_(found1, wanted) - found2 = self._fetchall_sorted(u.alias("bar").select().execute()) + found2 = self._fetchall_sorted( + connection.execute(u.alias("bar").select()) + ) eq_(found2, wanted) @testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs") - def test_union_ordered(self): + def test_union_ordered(self, connection): (s1, s2) = ( select( [t1.c.col3.label("col3"), t1.c.col4.label("col4")], @@ -1178,11 +1177,11 @@ class CompoundTest(fixtures.TestBase): ("bbb", "ccc"), ("ccc", "aaa"), ] - eq_(u.execute().fetchall(), wanted) + eq_(connection.execute(u).fetchall(), wanted) @testing.fails_on("firebird", "doesn't like ORDER BY with UNIONs") @testing.requires.subqueries - def test_union_ordered_alias(self): + def test_union_ordered_alias(self, connection): (s1, s2) = ( select( [t1.c.col3.label("col3"), t1.c.col4.label("col4")], @@ -1201,7 +1200,7 @@ class CompoundTest(fixtures.TestBase): ("bbb", "ccc"), ("ccc", "aaa"), ] - eq_(u.alias("bar").select().execute().fetchall(), wanted) + eq_(connection.execute(u.alias("bar").select()).fetchall(), wanted) @testing.crashes("oracle", "FIXME: unknown, verify not fails_on") @testing.fails_on( @@ -1212,20 +1211,22 @@ class CompoundTest(fixtures.TestBase): testing.requires._mysql_not_mariadb_104, "FIXME: unknown" ) @testing.fails_on("sqlite", "FIXME: unknown") - def test_union_all(self): + def test_union_all(self, connection): e = union_all( select([t1.c.col3]), union(select([t1.c.col3]), select([t1.c.col3])), ) wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)] - found1 = self._fetchall_sorted(e.execute()) + found1 = self._fetchall_sorted(connection.execute(e)) eq_(found1, wanted) - found2 = 
self._fetchall_sorted(e.alias("foo").select().execute()) + found2 = self._fetchall_sorted( + connection.execute(e.alias("foo").select()) + ) eq_(found2, wanted) - def test_union_all_lightweight(self): + def test_union_all_lightweight(self, connection): """like test_union_all, but breaks the sub-union into a subquery with an explicit column reference on the outside, more palatable to a wider variety of engines. @@ -1237,14 +1238,16 @@ class CompoundTest(fixtures.TestBase): e = union_all(select([t1.c.col3]), select([u.c.col3])) wanted = [("aaa",), ("aaa",), ("bbb",), ("bbb",), ("ccc",), ("ccc",)] - found1 = self._fetchall_sorted(e.execute()) + found1 = self._fetchall_sorted(connection.execute(e)) eq_(found1, wanted) - found2 = self._fetchall_sorted(e.alias("foo").select().execute()) + found2 = self._fetchall_sorted( + connection.execute(e.alias("foo").select()) + ) eq_(found2, wanted) @testing.requires.intersect - def test_intersect(self): + def test_intersect(self, connection): i = intersect( select([t2.c.col3, t2.c.col4]), select([t2.c.col3, t2.c.col4], t2.c.col4 == t3.c.col3), @@ -1252,15 +1255,17 @@ class CompoundTest(fixtures.TestBase): wanted = [("aaa", "bbb"), ("bbb", "ccc"), ("ccc", "aaa")] - found1 = self._fetchall_sorted(i.execute()) + found1 = self._fetchall_sorted(connection.execute(i)) eq_(found1, wanted) - found2 = self._fetchall_sorted(i.alias("bar").select().execute()) + found2 = self._fetchall_sorted( + connection.execute(i.alias("bar").select()) + ) eq_(found2, wanted) @testing.requires.except_ @testing.fails_on("sqlite", "Can't handle this style of nesting") - def test_except_style1(self): + def test_except_style1(self, connection): e = except_( union( select([t1.c.col3, t1.c.col4]), @@ -1279,11 +1284,11 @@ class CompoundTest(fixtures.TestBase): ("ccc", "ccc"), ] - found = self._fetchall_sorted(e.alias().select().execute()) + found = self._fetchall_sorted(connection.execute(e.alias().select())) eq_(found, wanted) @testing.requires.except_ - def test_except_style2(self): + def test_except_style2(self, connection): # same as style1, but add alias().select() to the except_(). # sqlite can handle it now. 
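# A minimal, self-contained sketch of the migration pattern these hunks
# apply across the whole suite: statements stop being executed implicitly
# (statement.execute(), testing.db.execute(statement)) and are instead
# passed to Connection.execute() on a connection the test receives as an
# argument. Only the argument *name* ("connection") and the call pattern
# come from the patch itself; the fixture body below is a hypothetical
# stand-in for the suite's real test plumbing, which this diff does not
# show.

import pytest
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")


@pytest.fixture
def connection():
    # Hand each test a Connection wrapped in a transaction that is
    # rolled back afterwards, so tests cannot leak state to each other.
    with engine.connect() as conn:
        trans = conn.begin()
        yield conn
        trans.rollback()


def test_scalar_select(connection):
    # old style:  select([...]).execute().scalar() / testing.db.scalar(...)
    # new style:  the statement is handed to Connection.execute()
    result = connection.execute(sa.select([sa.literal(1)]))
    assert result.scalar() == 1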
@@ -1307,10 +1312,10 @@ class CompoundTest(fixtures.TestBase): ("ccc", "ccc"), ] - found1 = self._fetchall_sorted(e.execute()) + found1 = self._fetchall_sorted(connection.execute(e)) eq_(found1, wanted) - found2 = self._fetchall_sorted(e.alias().select().execute()) + found2 = self._fetchall_sorted(connection.execute(e.alias().select())) eq_(found2, wanted) @testing.fails_on( @@ -1318,7 +1323,7 @@ class CompoundTest(fixtures.TestBase): "Can't handle this style of nesting", ) @testing.requires.except_ - def test_except_style3(self): + def test_except_style3(self, connection): # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc e = except_( select([t1.c.col3]), # aaa, bbb, ccc @@ -1327,11 +1332,11 @@ class CompoundTest(fixtures.TestBase): select([t3.c.col3], t3.c.col3 == "ccc"), # ccc ), ) - eq_(e.execute().fetchall(), [("ccc",)]) - eq_(e.alias("foo").select().execute().fetchall(), [("ccc",)]) + eq_(connection.execute(e).fetchall(), [("ccc",)]) + eq_(connection.execute(e.alias("foo").select()).fetchall(), [("ccc",)]) @testing.requires.except_ - def test_except_style4(self): + def test_except_style4(self, connection): # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc e = except_( select([t1.c.col3]), # aaa, bbb, ccc @@ -1343,15 +1348,15 @@ class CompoundTest(fixtures.TestBase): .select(), ) - eq_(e.execute().fetchall(), [("ccc",)]) - eq_(e.alias().select().execute().fetchall(), [("ccc",)]) + eq_(connection.execute(e).fetchall(), [("ccc",)]) + eq_(connection.execute(e.alias().select()).fetchall(), [("ccc",)]) @testing.requires.intersect @testing.fails_on( ["sqlite", testing.requires._mysql_not_mariadb_104], "sqlite can't handle leading parenthesis", ) - def test_intersect_unions(self): + def test_intersect_unions(self, connection): u = intersect( union( select([t1.c.col3, t1.c.col4]), select([t3.c.col3, t3.c.col4]) @@ -1363,12 +1368,12 @@ class CompoundTest(fixtures.TestBase): .select(), ) wanted = [("aaa", "ccc"), ("bbb", "aaa"), ("ccc", "bbb")] - found = self._fetchall_sorted(u.execute()) + found = self._fetchall_sorted(connection.execute(u)) eq_(found, wanted) @testing.requires.intersect - def test_intersect_unions_2(self): + def test_intersect_unions_2(self, connection): u = intersect( union( select([t1.c.col3, t1.c.col4]), select([t3.c.col3, t3.c.col4]) @@ -1382,12 +1387,12 @@ class CompoundTest(fixtures.TestBase): .select(), ) wanted = [("aaa", "ccc"), ("bbb", "aaa"), ("ccc", "bbb")] - found = self._fetchall_sorted(u.execute()) + found = self._fetchall_sorted(connection.execute(u)) eq_(found, wanted) @testing.requires.intersect - def test_intersect_unions_3(self): + def test_intersect_unions_3(self, connection): u = intersect( select([t2.c.col3, t2.c.col4]), union( @@ -1399,12 +1404,12 @@ class CompoundTest(fixtures.TestBase): .select(), ) wanted = [("aaa", "bbb"), ("bbb", "ccc"), ("ccc", "aaa")] - found = self._fetchall_sorted(u.execute()) + found = self._fetchall_sorted(connection.execute(u)) eq_(found, wanted) @testing.requires.intersect - def test_composite_alias(self): + def test_composite_alias(self, connection): ua = intersect( select([t2.c.col3, t2.c.col4]), union( @@ -1417,7 +1422,7 @@ class CompoundTest(fixtures.TestBase): ).alias() wanted = [("aaa", "bbb"), ("bbb", "ccc"), ("ccc", "aaa")] - found = self._fetchall_sorted(ua.select().execute()) + found = self._fetchall_sorted(connection.execute(ua.select())) eq_(found, wanted) @@ -1466,19 +1471,24 @@ class JoinTest(fixtures.TestBase): metadata.drop_all() metadata.create_all() - # t1.10 -> t2.20 -> t3.30 - # t1.11 -> t2.21 - # t1.12 - 
t1.insert().execute( - {"t1_id": 10, "name": "t1 #10"}, - {"t1_id": 11, "name": "t1 #11"}, - {"t1_id": 12, "name": "t1 #12"}, - ) - t2.insert().execute( - {"t2_id": 20, "t1_id": 10, "name": "t2 #20"}, - {"t2_id": 21, "t1_id": 11, "name": "t2 #21"}, - ) - t3.insert().execute({"t3_id": 30, "t2_id": 20, "name": "t3 #30"}) + with testing.db.connect() as conn: + # t1.10 -> t2.20 -> t3.30 + # t1.11 -> t2.21 + # t1.12 + conn.execute( + t1.insert(), + {"t1_id": 10, "name": "t1 #10"}, + {"t1_id": 11, "name": "t1 #11"}, + {"t1_id": 12, "name": "t1 #12"}, + ) + conn.execute( + t2.insert(), + {"t2_id": 20, "t1_id": 10, "name": "t2 #20"}, + {"t2_id": 21, "t1_id": 11, "name": "t2 #21"}, + ) + conn.execute( + t3.insert(), {"t3_id": 30, "t2_id": 20, "name": "t3 #30"} + ) @classmethod def teardown_class(cls): @@ -1486,10 +1496,11 @@ class JoinTest(fixtures.TestBase): def assertRows(self, statement, expected): """Execute a statement and assert that rows returned equal expected.""" - - found = sorted([tuple(row) for row in statement.execute().fetchall()]) - - eq_(found, sorted(expected)) + with testing.db.connect() as conn: + found = sorted( + [tuple(row) for row in conn.execute(statement).fetchall()] + ) + eq_(found, sorted(expected)) def test_join_x1(self): """Joins t1->t2.""" @@ -1805,30 +1816,35 @@ class OperatorTest(fixtures.TestBase): ) metadata.create_all() - flds.insert().execute( - [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")] - ) + with testing.db.connect() as conn: + conn.execute( + flds.insert(), + [dict(intcol=5, strcol="foo"), dict(intcol=13, strcol="bar")], + ) @classmethod def teardown_class(cls): metadata.drop_all() # TODO: seems like more tests warranted for this setup. - def test_modulo(self): + def test_modulo(self, connection): eq_( - select([flds.c.intcol % 3], order_by=flds.c.idcol) - .execute() - .fetchall(), + connection.execute( + select([flds.c.intcol % 3], order_by=flds.c.idcol) + ).fetchall(), [(2,), (1,)], ) @testing.requires.window_functions - def test_over(self): + def test_over(self, connection): eq_( - select( - [flds.c.intcol, func.row_number().over(order_by=flds.c.strcol)] - ) - .execute() - .fetchall(), + connection.execute( + select( + [ + flds.c.intcol, + func.row_number().over(order_by=flds.c.strcol), + ] + ) + ).fetchall(), [(13, 1), (5, 2)], ) diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py index 253ad7b38..f8d245228 100644 --- a/test/sql/test_resultset.py +++ b/test/sql/test_resultset.py @@ -90,29 +90,31 @@ class ResultProxyTest(fixtures.TablesTest): test_needs_acid=True, ) - def test_row_iteration(self): + def test_row_iteration(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, ) - r = users.select().execute() + r = connection.execute(users.select()) rows = [] for row in r: rows.append(row) eq_(len(rows), 3) - def test_row_next(self): + def test_row_next(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, ) - r = users.select().execute() + r = connection.execute(users.select()) rows = [] while True: row = next(r, "foo") @@ -122,10 +124,11 @@ class ResultProxyTest(fixtures.TablesTest): eq_(len(rows), 3) @testing.requires.subqueries - def test_anonymous_rows(self): + def test_anonymous_rows(self, 
connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, @@ -136,15 +139,17 @@ class ResultProxyTest(fixtures.TablesTest): .where(users.c.user_name == "jack") .scalar_subquery() ) - for row in select([sel + 1, sel + 3], bind=users.bind).execute(): + for row in connection.execute( + select([sel + 1, sel + 3], bind=users.bind) + ): eq_(row._mapping["anon_1"], 8) eq_(row._mapping["anon_2"], 10) - def test_row_comparison(self): + def test_row_comparison(self, connection): users = self.tables.users - users.insert().execute(user_id=7, user_name="jack") - rp = users.select().execute().first() + connection.execute(users.insert(), user_id=7, user_name="jack") + rp = connection.execute(users.select()).first() eq_(rp, rp) is_(not (rp != rp), True) @@ -192,19 +197,19 @@ class ResultProxyTest(fixtures.TablesTest): eq_(control, op(compare, rp)) @testing.provide_metadata - def test_column_label_overlap_fallback(self): + def test_column_label_overlap_fallback(self, connection): content = Table("content", self.metadata, Column("type", String(30))) bar = Table("bar", self.metadata, Column("content_type", String(30))) - self.metadata.create_all(testing.db) - testing.db.execute(content.insert().values(type="t1")) + self.metadata.create_all(connection) + connection.execute(content.insert().values(type="t1")) - row = testing.db.execute(content.select(use_labels=True)).first() + row = connection.execute(content.select(use_labels=True)).first() in_(content.c.type, row._mapping) not_in_(bar.c.content_type, row._mapping) not_in_(bar.c.content_type, row._mapping) - row = testing.db.execute( + row = connection.execute( select([func.now().label("content_type")]) ).first() @@ -212,10 +217,11 @@ class ResultProxyTest(fixtures.TablesTest): not_in_(bar.c.content_type, row._mapping) - def test_pickled_rows(self): + def test_pickled_rows(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), {"user_id": 7, "user_name": "jack"}, {"user_id": 8, "user_name": "ed"}, {"user_id": 9, "user_name": "fred"}, @@ -223,12 +229,11 @@ class ResultProxyTest(fixtures.TablesTest): for pickle in False, True: for use_labels in False, True: - result = ( - users.select(use_labels=use_labels) - .order_by(users.c.user_id) - .execute() - .fetchall() - ) + result = connection.execute( + users.select(use_labels=use_labels).order_by( + users.c.user_id + ) + ).fetchall() if pickle: result = util.pickle.loads(util.pickle.dumps(result)) @@ -255,8 +260,8 @@ class ResultProxyTest(fixtures.TablesTest): lambda: result[0]._mapping["fake key"], ) - def test_column_error_printing(self): - result = testing.db.execute(select([1])) + def test_column_error_printing(self, connection): + result = connection.execute(select([1])) row = result.first() class unprintable(object): @@ -283,42 +288,45 @@ class ResultProxyTest(fixtures.TablesTest): lambda: row._mapping[accessor], ) - def test_fetchmany(self): + def test_fetchmany(self, connection): users = self.tables.users - users.insert().execute(user_id=7, user_name="jack") - users.insert().execute(user_id=8, user_name="ed") - users.insert().execute(user_id=9, user_name="fred") - r = users.select().execute() + connection.execute(users.insert(), user_id=7, user_name="jack") + connection.execute(users.insert(), user_id=8, user_name="ed") + connection.execute(users.insert(), user_id=9, user_name="fred") + r = 
connection.execute(users.select()) rows = [] for row in r.fetchmany(size=2): rows.append(row) eq_(len(rows), 2) - def test_column_slices(self): + def test_column_slices(self, connection): users = self.tables.users addresses = self.tables.addresses - users.insert().execute(user_id=1, user_name="john") - users.insert().execute(user_id=2, user_name="jack") - addresses.insert().execute( - address_id=1, user_id=2, address="foo@bar.com" + connection.execute(users.insert(), user_id=1, user_name="john") + connection.execute(users.insert(), user_id=2, user_name="jack") + connection.execute( + addresses.insert(), address_id=1, user_id=2, address="foo@bar.com" ) - r = text("select * from addresses", bind=testing.db).execute().first() + r = connection.execute( + text("select * from addresses", bind=testing.db) + ).first() eq_(r[0:1], (1,)) eq_(r[1:], (2, "foo@bar.com")) eq_(r[:-1], (1, 2)) - def test_column_accessor_basic_compiled_mapping(self): + def test_column_accessor_basic_compiled_mapping(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) - r = users.select(users.c.user_id == 2).execute().first() + r = connection.execute(users.select(users.c.user_id == 2)).first() eq_(r.user_id, 2) eq_(r._mapping["user_id"], 2) eq_(r._mapping[users.c.user_id], 2) @@ -327,15 +335,16 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r._mapping["user_name"], "jack") eq_(r._mapping[users.c.user_name], "jack") - def test_column_accessor_basic_compiled_traditional(self): + def test_column_accessor_basic_compiled_traditional(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) - r = users.select(users.c.user_id == 2).execute().first() + r = connection.execute(users.select(users.c.user_id == 2)).first() eq_(r.user_id, 2) eq_(r._mapping["user_id"], 2) @@ -345,28 +354,30 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r._mapping["user_name"], "jack") eq_(r._mapping[users.c.user_name], "jack") - def test_row_getitem_string(self): + def test_row_getitem_string(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) - r = testing.db.execute( + r = connection.execute( text("select * from users where user_id=2") ).first() eq_(r._mapping["user_name"], "jack") - def test_column_accessor_basic_text(self): + def test_column_accessor_basic_text(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) - r = testing.db.execute( + r = connection.execute( text("select * from users where user_id=2") ).first() @@ -379,14 +390,15 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r.user_name, "jack") eq_(r._mapping["user_name"], "jack") - def test_column_accessor_text_colexplicit(self): + def test_column_accessor_text_colexplicit(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) - r = testing.db.execute( + r = connection.execute( text("select * from users where user_id=2").columns( users.c.user_id, users.c.user_name ) @@ -400,16 +412,17 @@ class ResultProxyTest(fixtures.TablesTest): 
eq_(r._mapping["user_name"], "jack") eq_(r._mapping[users.c.user_name], "jack") - def test_column_accessor_textual_select(self): + def test_column_accessor_textual_select(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="john"), dict(user_id=2, user_name="jack"), ) # this will create column() objects inside # the select(), these need to match on name anyway - r = testing.db.execute( + r = connection.execute( select([column("user_id"), column("user_name")]) .select_from(table("users")) .where(text("user_id=2")) @@ -422,14 +435,14 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r.user_name, "jack") eq_(r._mapping["user_name"], "jack") - def test_column_accessor_dotted_union(self): + def test_column_accessor_dotted_union(self, connection): users = self.tables.users - users.insert().execute(dict(user_id=1, user_name="john")) + connection.execute(users.insert(), dict(user_id=1, user_name="john")) # test a little sqlite < 3.10.0 weirdness - with the UNION, # cols come back as "users.user_id" in cursor.description - r = testing.db.execute( + r = connection.execute( text( "select users.user_id, users.user_name " "from users " @@ -441,23 +454,20 @@ class ResultProxyTest(fixtures.TablesTest): eq_(r._mapping["user_name"], "john") eq_(list(r._fields), ["user_id", "user_name"]) - def test_column_accessor_sqlite_raw(self): + def test_column_accessor_sqlite_raw(self, connection): users = self.tables.users - users.insert().execute(dict(user_id=1, user_name="john")) + connection.execute(users.insert(), dict(user_id=1, user_name="john")) - r = ( + r = connection.execute( text( "select users.user_id, users.user_name " "from users " "UNION select users.user_id, " "users.user_name from users", bind=testing.db, - ) - .execution_options(sqlite_raw_colnames=True) - .execute() - .first() - ) + ).execution_options(sqlite_raw_colnames=True) + ).first() if testing.against("sqlite < 3.10.0"): not_in_("user_id", r) @@ -474,12 +484,12 @@ class ResultProxyTest(fixtures.TablesTest): eq_(list(r._fields), ["user_id", "user_name"]) - def test_column_accessor_sqlite_translated(self): + def test_column_accessor_sqlite_translated(self, connection): users = self.tables.users - users.insert().execute(dict(user_id=1, user_name="john")) + connection.execute(users.insert(), dict(user_id=1, user_name="john")) - r = ( + r = connection.execute( text( "select users.user_id, users.user_name " "from users " @@ -487,9 +497,7 @@ class ResultProxyTest(fixtures.TablesTest): "users.user_name from users", bind=testing.db, ) - .execute() - .first() - ) + ).first() eq_(r._mapping["user_id"], 1) eq_(r._mapping["user_name"], "john") @@ -502,44 +510,38 @@ class ResultProxyTest(fixtures.TablesTest): eq_(list(r._fields), ["user_id", "user_name"]) - def test_column_accessor_labels_w_dots(self): + def test_column_accessor_labels_w_dots(self, connection): users = self.tables.users - users.insert().execute(dict(user_id=1, user_name="john")) + connection.execute(users.insert(), dict(user_id=1, user_name="john")) # test using literal tablename.colname - r = ( + r = connection.execute( text( 'select users.user_id AS "users.user_id", ' 'users.user_name AS "users.user_name" ' "from users", bind=testing.db, - ) - .execution_options(sqlite_raw_colnames=True) - .execute() - .first() - ) + ).execution_options(sqlite_raw_colnames=True) + ).first() eq_(r._mapping["users.user_id"], 1) eq_(r._mapping["users.user_name"], "john") not_in_("user_name", r._mapping) 
eq_(list(r._fields), ["users.user_id", "users.user_name"]) - def test_column_accessor_unary(self): + def test_column_accessor_unary(self, connection): users = self.tables.users - users.insert().execute(dict(user_id=1, user_name="john")) + connection.execute(users.insert(), dict(user_id=1, user_name="john")) # unary expressions - r = ( - select([users.c.user_name.distinct()]) - .order_by(users.c.user_name) - .execute() - .first() - ) + r = connection.execute( + select([users.c.user_name.distinct()]).order_by(users.c.user_name) + ).first() eq_(r._mapping[users.c.user_name], "john") eq_(r.user_name, "john") - def test_column_accessor_err(self): - r = testing.db.execute(select([1])).first() + def test_column_accessor_err(self, connection): + r = connection.execute(select([1])).first() assert_raises_message( AttributeError, "Could not locate column in row for column 'foo'", @@ -601,6 +603,7 @@ class ResultProxyTest(fixtures.TablesTest): ) def test_connectionless_autoclose_rows_exhausted(self): + # TODO: deprecate for 2.0 users = self.tables.users with testing.db.connect() as conn: conn.execute(users.insert(), dict(user_id=1, user_name="john")) @@ -615,6 +618,7 @@ class ResultProxyTest(fixtures.TablesTest): @testing.requires.returning def test_connectionless_autoclose_crud_rows_exhausted(self): + # TODO: deprecate for 2.0 users = self.tables.users stmt = ( users.insert() @@ -630,6 +634,7 @@ class ResultProxyTest(fixtures.TablesTest): assert connection.closed def test_connectionless_autoclose_no_rows(self): + # TODO: deprecate for 2.0 result = testing.db.execute(text("select * from users")) connection = result.connection assert not connection.closed @@ -638,6 +643,7 @@ class ResultProxyTest(fixtures.TablesTest): @testing.requires.updateable_autoincrement_pks def test_connectionless_autoclose_no_metadata(self): + # TODO: deprecate for 2.0 result = testing.db.execute(text("update users set user_id=5")) connection = result.connection assert connection.closed @@ -647,8 +653,8 @@ class ResultProxyTest(fixtures.TablesTest): result.fetchone, ) - def test_row_case_sensitive(self): - row = testing.db.execute( + def test_row_case_sensitive(self, connection): + row = connection.execute( select( [ literal_column("1").label("case_insensitive"), @@ -670,75 +676,80 @@ class ResultProxyTest(fixtures.TablesTest): assert_raises(KeyError, lambda: row._mapping["casesensitive"]) def test_row_case_sensitive_unoptimized(self): - ins_db = engines.testing_engine() - row = ins_db.execute( - select( - [ - literal_column("1").label("case_insensitive"), - literal_column("2").label("CaseSensitive"), - text("3 AS screw_up_the_cols"), - ] - ) - ).first() + with engines.testing_engine().connect() as ins_conn: + row = ins_conn.execute( + select( + [ + literal_column("1").label("case_insensitive"), + literal_column("2").label("CaseSensitive"), + text("3 AS screw_up_the_cols"), + ] + ) + ).first() - eq_( - list(row._fields), - ["case_insensitive", "CaseSensitive", "screw_up_the_cols"], - ) + eq_( + list(row._fields), + ["case_insensitive", "CaseSensitive", "screw_up_the_cols"], + ) - in_("case_insensitive", row._keymap) - in_("CaseSensitive", row._keymap) - not_in_("casesensitive", row._keymap) + in_("case_insensitive", row._keymap) + in_("CaseSensitive", row._keymap) + not_in_("casesensitive", row._keymap) - eq_(row._mapping["case_insensitive"], 1) - eq_(row._mapping["CaseSensitive"], 2) - eq_(row._mapping["screw_up_the_cols"], 3) + eq_(row._mapping["case_insensitive"], 1) + eq_(row._mapping["CaseSensitive"], 2) + 
eq_(row._mapping["screw_up_the_cols"], 3) - assert_raises(KeyError, lambda: row._mapping["Case_insensitive"]) - assert_raises(KeyError, lambda: row._mapping["casesensitive"]) - assert_raises(KeyError, lambda: row._mapping["screw_UP_the_cols"]) + assert_raises(KeyError, lambda: row._mapping["Case_insensitive"]) + assert_raises(KeyError, lambda: row._mapping["casesensitive"]) + assert_raises(KeyError, lambda: row._mapping["screw_UP_the_cols"]) - def test_row_as_args(self): + def test_row_as_args(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="john") - r = users.select(users.c.user_id == 1).execute().first() - users.delete().execute() - users.insert().execute(r._mapping) - eq_(users.select().execute().fetchall(), [(1, "john")]) + connection.execute(users.insert(), user_id=1, user_name="john") + r = connection.execute(users.select(users.c.user_id == 1)).first() + connection.execute(users.delete()) + connection.execute(users.insert(), r._mapping) + eq_(connection.execute(users.select()).fetchall(), [(1, "john")]) - def test_result_as_args(self): + def test_result_as_args(self, connection): users = self.tables.users users2 = self.tables.users2 - users.insert().execute( + connection.execute( + users.insert(), [ dict(user_id=1, user_name="john"), dict(user_id=2, user_name="ed"), - ] + ], ) - r = users.select().execute() - users2.insert().execute([row._mapping for row in r]) + r = connection.execute(users.select()) + connection.execute(users2.insert(), [row._mapping for row in r]) eq_( - users2.select().order_by(users2.c.user_id).execute().fetchall(), + connection.execute( + users2.select().order_by(users2.c.user_id) + ).fetchall(), [(1, "john"), (2, "ed")], ) - users2.delete().execute() - r = users.select().execute() - users2.insert().execute(*[row._mapping for row in r]) + connection.execute(users2.delete()) + r = connection.execute(users.select()) + connection.execute(users2.insert(), *[row._mapping for row in r]) eq_( - users2.select().order_by(users2.c.user_id).execute().fetchall(), + connection.execute( + users2.select().order_by(users2.c.user_id) + ).fetchall(), [(1, "john"), (2, "ed")], ) @testing.requires.duplicate_names_in_cursor_description - def test_ambiguous_column(self): + def test_ambiguous_column(self, connection): users = self.tables.users addresses = self.tables.addresses - users.insert().execute(user_id=1, user_name="john") - result = users.outerjoin(addresses).select().execute() + connection.execute(users.insert(), user_id=1, user_name="john") + result = connection.execute(users.outerjoin(addresses).select()) r = result.first() assert_raises_message( @@ -776,7 +787,7 @@ class ResultProxyTest(fixtures.TablesTest): lambda: r._mapping["user_id"], ) - result = users.outerjoin(addresses).select().execute() + result = connection.execute(users.outerjoin(addresses).select()) result = _result.BufferedColumnResultProxy(result.context) r = result.first() assert isinstance(r, _result.BufferedColumnRow) @@ -787,16 +798,16 @@ class ResultProxyTest(fixtures.TablesTest): ) @testing.requires.duplicate_names_in_cursor_description - def test_ambiguous_column_by_col(self): + def test_ambiguous_column_by_col(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="john") + connection.execute(users.insert(), user_id=1, user_name="john") ua = users.alias() u2 = users.alias() - result = ( - select([users.c.user_id, ua.c.user_id]) - .select_from(users.join(ua, true())) - .execute() + result = connection.execute( + 
select([users.c.user_id, ua.c.user_id]).select_from( + users.join(ua, true()) + ) ) row = result.first() @@ -815,18 +826,18 @@ class ResultProxyTest(fixtures.TablesTest): ) @testing.requires.duplicate_names_in_cursor_description - def test_ambiguous_column_contains(self): + def test_ambiguous_column_contains(self, connection): users = self.tables.users addresses = self.tables.addresses # ticket 2702. in 0.7 we'd get True, False. # in 0.8, both columns are present so it's True; # but when they're fetched you'll get the ambiguous error. - users.insert().execute(user_id=1, user_name="john") - result = ( - select([users.c.user_id, addresses.c.user_id]) - .select_from(users.outerjoin(addresses)) - .execute() + connection.execute(users.insert(), user_id=1, user_name="john") + result = connection.execute( + select([users.c.user_id, addresses.c.user_id]).select_from( + users.outerjoin(addresses) + ) ) row = result.first() @@ -840,106 +851,102 @@ class ResultProxyTest(fixtures.TablesTest): set([True]), ) - def test_loose_matching_one(self): + def test_loose_matching_one(self, connection): users = self.tables.users addresses = self.tables.addresses - with testing.db.connect() as conn: - conn.execute(users.insert(), {"user_id": 1, "user_name": "john"}) - conn.execute( - addresses.insert(), - {"address_id": 1, "user_id": 1, "address": "email"}, + connection.execute(users.insert(), {"user_id": 1, "user_name": "john"}) + connection.execute( + addresses.insert(), + {"address_id": 1, "user_id": 1, "address": "email"}, + ) + + # use some column labels in the SELECT + result = connection.execute( + TextualSelect( + text( + "select users.user_name AS users_user_name, " + "users.user_id AS user_id, " + "addresses.address_id AS address_id " + "FROM users JOIN addresses " + "ON users.user_id = addresses.user_id " + "WHERE users.user_id=1 " + ), + [users.c.user_id, users.c.user_name, addresses.c.address_id], + positional=False, ) + ) + row = result.first() + eq_(row._mapping[users.c.user_id], 1) + eq_(row._mapping[users.c.user_name], "john") - # use some column labels in the SELECT - result = conn.execute( - TextualSelect( - text( - "select users.user_name AS users_user_name, " - "users.user_id AS user_id, " - "addresses.address_id AS address_id " - "FROM users JOIN addresses " - "ON users.user_id = addresses.user_id " - "WHERE users.user_id=1 " - ), - [ - users.c.user_id, - users.c.user_name, - addresses.c.address_id, - ], - positional=False, - ) - ) - row = result.first() - eq_(row._mapping[users.c.user_id], 1) - eq_(row._mapping[users.c.user_name], "john") - - def test_loose_matching_two(self): + def test_loose_matching_two(self, connection): users = self.tables.users addresses = self.tables.addresses - with testing.db.connect() as conn: - conn.execute(users.insert(), {"user_id": 1, "user_name": "john"}) - conn.execute( - addresses.insert(), - {"address_id": 1, "user_id": 1, "address": "email"}, - ) - - # use some column labels in the SELECT - result = conn.execute( - TextualSelect( - text( - "select users.user_name AS users_user_name, " - "users.user_id AS user_id, " - "addresses.user_id " - "FROM users JOIN addresses " - "ON users.user_id = addresses.user_id " - "WHERE users.user_id=1 " - ), - [users.c.user_id, users.c.user_name, addresses.c.user_id], - positional=False, - ) + connection.execute(users.insert(), {"user_id": 1, "user_name": "john"}) + connection.execute( + addresses.insert(), + {"address_id": 1, "user_id": 1, "address": "email"}, + ) + + # use some column labels in the SELECT + result = 
connection.execute( + TextualSelect( + text( + "select users.user_name AS users_user_name, " + "users.user_id AS user_id, " + "addresses.user_id " + "FROM users JOIN addresses " + "ON users.user_id = addresses.user_id " + "WHERE users.user_id=1 " + ), + [users.c.user_id, users.c.user_name, addresses.c.user_id], + positional=False, ) - row = result.first() + ) + row = result.first() - assert_raises_message( - exc.InvalidRequestError, - "Ambiguous column name", - lambda: row._mapping[users.c.user_id], - ) - assert_raises_message( - exc.InvalidRequestError, - "Ambiguous column name", - lambda: row._mapping[addresses.c.user_id], - ) - eq_(row._mapping[users.c.user_name], "john") + assert_raises_message( + exc.InvalidRequestError, + "Ambiguous column name", + lambda: row._mapping[users.c.user_id], + ) + assert_raises_message( + exc.InvalidRequestError, + "Ambiguous column name", + lambda: row._mapping[addresses.c.user_id], + ) + eq_(row._mapping[users.c.user_name], "john") - def test_ambiguous_column_by_col_plus_label(self): + def test_ambiguous_column_by_col_plus_label(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="john") - result = select( - [ - users.c.user_id, - type_coerce(users.c.user_id, Integer).label("foo"), - ] - ).execute() + connection.execute(users.insert(), user_id=1, user_name="john") + result = connection.execute( + select( + [ + users.c.user_id, + type_coerce(users.c.user_id, Integer).label("foo"), + ] + ) + ) row = result.first() eq_(row._mapping[users.c.user_id], 1) eq_(row[1], 1) - def test_fetch_partial_result_map(self): + def test_fetch_partial_result_map(self, connection): users = self.tables.users - users.insert().execute(user_id=7, user_name="ed") + connection.execute(users.insert(), user_id=7, user_name="ed") t = text("select * from users").columns(user_name=String()) - eq_(testing.db.execute(t).fetchall(), [(7, "ed")]) + eq_(connection.execute(t).fetchall(), [(7, "ed")]) - def test_fetch_unordered_result_map(self): + def test_fetch_unordered_result_map(self, connection): users = self.tables.users - users.insert().execute(user_id=7, user_name="ed") + connection.execute(users.insert(), user_id=7, user_name="ed") class Goofy1(TypeDecorator): impl = String @@ -963,28 +970,28 @@ class ResultProxyTest(fixtures.TablesTest): "select user_name as a, user_name as b, " "user_name as c from users" ).columns(a=Goofy1(), b=Goofy2(), c=Goofy3()) - eq_(testing.db.execute(t).fetchall(), [("eda", "edb", "edc")]) + eq_(connection.execute(t).fetchall(), [("eda", "edb", "edc")]) @testing.requires.subqueries - def test_column_label_targeting(self): + def test_column_label_targeting(self, connection): users = self.tables.users - users.insert().execute(user_id=7, user_name="ed") + connection.execute(users.insert(), user_id=7, user_name="ed") for s in ( users.select().alias("foo"), users.select().alias(users.name), ): - row = s.select(use_labels=True).execute().first() + row = connection.execute(s.select(use_labels=True)).first() eq_(row._mapping[s.c.user_id], 7) eq_(row._mapping[s.c.user_name], "ed") @testing.requires.python3 - def test_ro_mapping_py3k(self): + def test_ro_mapping_py3k(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - result = users.select().execute() + connection.execute(users.insert(), user_id=1, user_name="foo") + result = connection.execute(users.select()) row = result.first() dict_row = row._asdict() @@ -1003,11 +1010,11 @@ class ResultProxyTest(fixtures.TablesTest): 
eq_(odict_row.items(), mapping_row.items()) @testing.requires.python2 - def test_ro_mapping_py2k(self): + def test_ro_mapping_py2k(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - result = users.select().execute() + connection.execute(users.insert(), user_id=1, user_name="foo") + result = connection.execute(users.select()) row = result.first() dict_row = row._asdict() @@ -1023,33 +1030,33 @@ class ResultProxyTest(fixtures.TablesTest): eq_(odict_row.values(), list(mapping_row.values())) eq_(odict_row.items(), list(mapping_row.items())) - def test_keys(self): + def test_keys(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - result = users.select().execute() + connection.execute(users.insert(), user_id=1, user_name="foo") + result = connection.execute(users.select()) eq_(result.keys(), ["user_id", "user_name"]) row = result.first() eq_(list(row._mapping.keys()), ["user_id", "user_name"]) eq_(row._fields, ("user_id", "user_name")) - def test_row_keys_legacy_dont_warn(self): + def test_row_keys_legacy_dont_warn(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - result = users.select().execute() + connection.execute(users.insert(), user_id=1, user_name="foo") + result = connection.execute(users.select()) row = result.first() # DO NOT WARN DEPRECATED IN 1.x, ONLY 2.0 WARNING eq_(dict(row), {"user_id": 1, "user_name": "foo"}) eq_(row.keys(), ["user_id", "user_name"]) - def test_keys_anon_labels(self): + def test_keys_anon_labels(self, connection): """test [ticket:3483]""" users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - result = testing.db.execute( + connection.execute(users.insert(), user_id=1, user_name="foo") + result = connection.execute( select( [ users.c.user_id, @@ -1064,11 +1071,11 @@ class ResultProxyTest(fixtures.TablesTest): eq_(row._fields, ("user_id", "user_name_1", "count_1")) eq_(list(row._mapping.keys()), ["user_id", "user_name_1", "count_1"]) - def test_items(self): + def test_items(self, connection): users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - r = users.select().execute().first() + connection.execute(users.insert(), user_id=1, user_name="foo") + r = connection.execute(users.select()).first() eq_( [(x[0].lower(), x[1]) for x in list(r._mapping.items())], [("user_id", 1), ("user_name", "foo")], @@ -1088,27 +1095,30 @@ class ResultProxyTest(fixtures.TablesTest): r = connection.exec_driver_sql("select user_name from users").first() eq_(len(r), 1) - def test_sorting_in_python(self): + def test_sorting_in_python(self, connection): users = self.tables.users - users.insert().execute( + connection.execute( + users.insert(), dict(user_id=1, user_name="foo"), dict(user_id=2, user_name="bar"), dict(user_id=3, user_name="def"), ) - rows = users.select().order_by(users.c.user_name).execute().fetchall() + rows = connection.execute( + users.select().order_by(users.c.user_name) + ).fetchall() eq_(rows, [(2, "bar"), (3, "def"), (1, "foo")]) eq_(sorted(rows), [(1, "foo"), (2, "bar"), (3, "def")]) - def test_column_order_with_simple_query(self): + def test_column_order_with_simple_query(self, connection): # should return values in column definition order users = self.tables.users - users.insert().execute(user_id=1, user_name="foo") - r = users.select(users.c.user_id == 1).execute().first() + connection.execute(users.insert(), user_id=1, user_name="foo") + r = 
connection.execute(users.select(users.c.user_id == 1)).first() eq_(r[0], 1) eq_(r[1], "foo") eq_([x.lower() for x in r._fields], ["user_id", "user_name"]) @@ -1128,10 +1138,10 @@ class ResultProxyTest(fixtures.TablesTest): eq_([x.lower() for x in r._fields], ["user_name", "user_id"]) eq_(list(r._mapping.values()), ["foo", 1]) - @testing.crashes("oracle", "FIXME: unknown, varify not fails_on()") + @testing.crashes("oracle", "FIXME: unknown, verify not fails_on()") @testing.crashes("firebird", "An identifier must begin with a letter") @testing.provide_metadata - def test_column_accessor_shadow(self): + def test_column_accessor_shadow(self, connection): shadowed = Table( "test_shadowed", self.metadata, @@ -1142,8 +1152,9 @@ class ResultProxyTest(fixtures.TablesTest): Column("_parent", VARCHAR(20)), Column("_row", VARCHAR(20)), ) - self.metadata.create_all() - shadowed.insert().execute( + self.metadata.create_all(connection) + connection.execute( + shadowed.insert(), shadow_id=1, shadow_name="The Shadow", parent="The Light", @@ -1151,7 +1162,9 @@ class ResultProxyTest(fixtures.TablesTest): _parent="Hidden parent", _row="Hidden row", ) - r = shadowed.select(shadowed.c.shadow_id == 1).execute().first() + r = connection.execute( + shadowed.select(shadowed.c.shadow_id == 1) + ).first() eq_(r.shadow_id, 1) eq_(r._mapping["shadow_id"], 1) @@ -1221,25 +1234,26 @@ class ResultProxyTest(fixtures.TablesTest): with patch.object( engine.dialect.execution_ctx_cls, "rowcount" ) as mock_rowcount: - mock_rowcount.__get__ = Mock() - engine.execute( - t.insert(), {"data": "d1"}, {"data": "d2"}, {"data": "d3"} - ) + with engine.connect() as conn: + mock_rowcount.__get__ = Mock() + conn.execute( + t.insert(), {"data": "d1"}, {"data": "d2"}, {"data": "d3"} + ) - eq_(len(mock_rowcount.__get__.mock_calls), 0) + eq_(len(mock_rowcount.__get__.mock_calls), 0) - eq_( - engine.execute(t.select()).fetchall(), - [("d1",), ("d2",), ("d3",)], - ) - eq_(len(mock_rowcount.__get__.mock_calls), 0) + eq_( + conn.execute(t.select()).fetchall(), + [("d1",), ("d2",), ("d3",)], + ) + eq_(len(mock_rowcount.__get__.mock_calls), 0) - engine.execute(t.update(), {"data": "d4"}) + conn.execute(t.update(), {"data": "d4"}) - eq_(len(mock_rowcount.__get__.mock_calls), 1) + eq_(len(mock_rowcount.__get__.mock_calls), 1) - engine.execute(t.delete()) - eq_(len(mock_rowcount.__get__.mock_calls), 2) + conn.execute(t.delete()) + eq_(len(mock_rowcount.__get__.mock_calls), 2) def test_row_is_sequence(self): @@ -1259,17 +1273,17 @@ class ResultProxyTest(fixtures.TablesTest): eq_(hash(row), hash((1, "value", "foo"))) @testing.provide_metadata - def test_row_getitem_indexes_compiled(self): + def test_row_getitem_indexes_compiled(self, connection): values = Table( "rp", self.metadata, Column("key", String(10), primary_key=True), Column("value", String(10)), ) - values.create() + values.create(connection) - testing.db.execute(values.insert(), dict(key="One", value="Uno")) - row = testing.db.execute(values.select()).first() + connection.execute(values.insert(), dict(key="One", value="Uno")) + row = connection.execute(values.select()).first() eq_(row._mapping["key"], "One") eq_(row._mapping["value"], "Uno") eq_(row[0], "One") @@ -1293,7 +1307,7 @@ class ResultProxyTest(fixtures.TablesTest): @testing.requires.cextensions def test_row_c_sequence_check(self): - + # TODO: modernize for 2.0 metadata = MetaData() metadata.bind = "sqlite://" users = Table( @@ -1409,39 +1423,39 @@ class KeyTargetingTest(fixtures.TablesTest): ) @testing.requires.schemas - def 
test_keyed_accessor_wschema(self): + def test_keyed_accessor_wschema(self, connection): keyed1 = self.tables["%s.wschema" % testing.config.test_schema] - row = testing.db.execute(keyed1.select()).first() + row = connection.execute(keyed1.select()).first() eq_(row.b, "a1") eq_(row.q, "c1") eq_(row.a, "a1") eq_(row.c, "c1") - def test_keyed_accessor_single(self): + def test_keyed_accessor_single(self, connection): keyed1 = self.tables.keyed1 - row = testing.db.execute(keyed1.select()).first() + row = connection.execute(keyed1.select()).first() eq_(row.b, "a1") eq_(row.q, "c1") eq_(row.a, "a1") eq_(row.c, "c1") - def test_keyed_accessor_single_labeled(self): + def test_keyed_accessor_single_labeled(self, connection): keyed1 = self.tables.keyed1 - row = testing.db.execute(keyed1.select().apply_labels()).first() + row = connection.execute(keyed1.select().apply_labels()).first() eq_(row.keyed1_b, "a1") eq_(row.keyed1_q, "c1") eq_(row.keyed1_a, "a1") eq_(row.keyed1_c, "c1") - def _test_keyed_targeting_no_label_at_all(self, expression): + def _test_keyed_targeting_no_label_at_all(self, expression, conn): lt = literal_column("2") stmt = select([literal_column("1"), expression, lt]).select_from( self.tables.keyed1 ) - row = testing.db.execute(stmt).first() + row = conn.execute(stmt).first() eq_(row._mapping[expression], "a1") eq_(row._mapping[lt], 2) @@ -1450,7 +1464,7 @@ class KeyTargetingTest(fixtures.TablesTest): # easily. we get around that because we know that "2" is unique eq_(row._mapping["2"], 2) - def test_keyed_targeting_no_label_at_all_one(self): + def test_keyed_targeting_no_label_at_all_one(self, connection): class not_named_max(expression.ColumnElement): name = "not_named_max" @@ -1464,9 +1478,9 @@ class KeyTargetingTest(fixtures.TablesTest): eq_(str(select([not_named_max()])), "SELECT max(a)") nnm = not_named_max() - self._test_keyed_targeting_no_label_at_all(nnm) + self._test_keyed_targeting_no_label_at_all(nnm, connection) - def test_keyed_targeting_no_label_at_all_two(self): + def test_keyed_targeting_no_label_at_all_two(self, connection): class not_named_max(expression.ColumnElement): name = "not_named_max" @@ -1479,24 +1493,24 @@ class KeyTargetingTest(fixtures.TablesTest): eq_(str(select([not_named_max()])), "SELECT max(a)") nnm = not_named_max() - self._test_keyed_targeting_no_label_at_all(nnm) + self._test_keyed_targeting_no_label_at_all(nnm, connection) - def test_keyed_targeting_no_label_at_all_text(self): + def test_keyed_targeting_no_label_at_all_text(self, connection): t1 = text("max(a)") t2 = text("min(a)") stmt = select([t1, t2]).select_from(self.tables.keyed1) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() eq_(row._mapping[t1], "a1") eq_(row._mapping[t2], "a1") @testing.requires.duplicate_names_in_cursor_description - def test_keyed_accessor_composite_conflict_2(self): + def test_keyed_accessor_composite_conflict_2(self, connection): keyed1 = self.tables.keyed1 keyed2 = self.tables.keyed2 - row = testing.db.execute( + row = connection.execute( select([keyed1, keyed2]).select_from(keyed1.join(keyed2, true())) ).first() @@ -1522,14 +1536,16 @@ class KeyTargetingTest(fixtures.TablesTest): # illustrate why row.b above is ambiguous, and not "b2"; because # if we didn't have keyed2, now it matches row.a. a new column # shouldn't be able to grab the value from a previous column. 
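The ambiguity these hunks exercise is easy to reproduce standalone, as the sketch below shows. This is not part of the patch: it assumes an in-memory SQLite engine and two throwaway tables that share a column name (the keyed1/keyed2 fixtures are defined earlier in the file and are not shown in this diff), and it uses the same 1.4-era select([...]) / apply_labels() API as the tests themselves:

    from sqlalchemy import (
        Column,
        Integer,
        MetaData,
        String,
        Table,
        create_engine,
        select,
        true,
    )

    engine = create_engine("sqlite://")
    metadata = MetaData()
    t1 = Table("t1", metadata, Column("id", Integer), Column("b", String(2)))
    t2 = Table("t2", metadata, Column("id", Integer), Column("b", String(2)))
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(t1.insert(), {"id": 1, "b": "a1"})
        conn.execute(t2.insert(), {"id": 1, "b": "b2"})

        stmt = select([t1, t2]).select_from(t1.join(t2, true()))

        # without labels, both tables contribute a column named "b"; the
        # row itself fetches fine, but accessing the plain name raises
        # InvalidRequestError("Ambiguous column name ...")
        row = conn.execute(stmt).first()

        # apply_labels() qualifies every result column as <table>_<column>,
        # which is the disambiguation the "fix_w_uselabels" variant relies on
        row = conn.execute(stmt.apply_labels()).first()
        assert row._mapping["t1_b"] == "a1"
        assert row._mapping["t2_b"] == "b2"

The same labeling is why row._mapping["keyed1_a"] and row._mapping["keyed2_b"] resolve unambiguously in the test below while the unlabeled row.b does not.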
- row = testing.db.execute(select([keyed1])).first() + row = connection.execute(select([keyed1])).first() eq_(row.b, "a1") - def test_keyed_accessor_composite_conflict_2_fix_w_uselabels(self): + def test_keyed_accessor_composite_conflict_2_fix_w_uselabels( + self, connection + ): keyed1 = self.tables.keyed1 keyed2 = self.tables.keyed2 - row = testing.db.execute( + row = connection.execute( select([keyed1, keyed2]) .select_from(keyed1.join(keyed2, true())) .apply_labels() @@ -1541,11 +1557,11 @@ class KeyTargetingTest(fixtures.TablesTest): eq_(row._mapping["keyed2_b"], "b2") eq_(row._mapping["keyed1_a"], "a1") - def test_keyed_accessor_composite_names_precedent(self): + def test_keyed_accessor_composite_names_precedent(self, connection): keyed1 = self.tables.keyed1 keyed4 = self.tables.keyed4 - row = testing.db.execute( + row = connection.execute( select([keyed1, keyed4]).select_from(keyed1.join(keyed4, true())) ).first() eq_(row.b, "b4") @@ -1554,11 +1570,11 @@ class KeyTargetingTest(fixtures.TablesTest): eq_(row.c, "c1") @testing.requires.duplicate_names_in_cursor_description - def test_keyed_accessor_composite_keys_precedent(self): + def test_keyed_accessor_composite_keys_precedent(self, connection): keyed1 = self.tables.keyed1 keyed3 = self.tables.keyed3 - row = testing.db.execute( + row = connection.execute( select([keyed1, keyed3]).select_from(keyed1.join(keyed3, true())) ).first() eq_(row.q, "c1") @@ -1578,11 +1594,11 @@ class KeyTargetingTest(fixtures.TablesTest): ) eq_(row.d, "d3") - def test_keyed_accessor_composite_labeled(self): + def test_keyed_accessor_composite_labeled(self, connection): keyed1 = self.tables.keyed1 keyed2 = self.tables.keyed2 - row = testing.db.execute( + row = connection.execute( select([keyed1, keyed2]) .select_from(keyed1.join(keyed2, true())) .apply_labels() @@ -1599,7 +1615,9 @@ class KeyTargetingTest(fixtures.TablesTest): assert_raises(KeyError, lambda: row._mapping["keyed2_c"]) assert_raises(KeyError, lambda: row._mapping["keyed2_q"]) - def test_keyed_accessor_column_is_repeated_multiple_times(self): + def test_keyed_accessor_column_is_repeated_multiple_times( + self, connection + ): # test new logic added as a result of the combination of #4892 and # #4887. 
We allow duplicate columns, but we also have special logic # to disambiguate for the same column repeated, and as #4887 adds @@ -1627,7 +1645,7 @@ class KeyTargetingTest(fixtures.TablesTest): .apply_labels() ) - result = testing.db.execute(stmt) + result = connection.execute(stmt) is_false(result._metadata.matched_on_name) # ensure the result map is the same number of cols so we can @@ -1664,34 +1682,34 @@ class KeyTargetingTest(fixtures.TablesTest): eq_(row[6], "d3") eq_(row[7], "d3") - def test_columnclause_schema_column_one(self): + def test_columnclause_schema_column_one(self, connection): # originally addressed by [ticket:2932], however liberalized # Column-targeting rules are deprecated a, b = sql.column("a"), sql.column("b") stmt = select([a, b]).select_from(table("keyed2")) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() in_(a, row._mapping) in_(b, row._mapping) - def test_columnclause_schema_column_two(self): + def test_columnclause_schema_column_two(self, connection): keyed2 = self.tables.keyed2 stmt = select([keyed2.c.a, keyed2.c.b]) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() in_(keyed2.c.a, row._mapping) in_(keyed2.c.b, row._mapping) - def test_columnclause_schema_column_three(self): + def test_columnclause_schema_column_three(self, connection): # this is also addressed by [ticket:2932] stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() in_(stmt.selected_columns.a, row._mapping) in_(stmt.selected_columns.b, row._mapping) - def test_columnclause_schema_column_four(self): + def test_columnclause_schema_column_four(self, connection): # originally addressed by [ticket:2932], however liberalized # Column-targeting rules are deprecated @@ -1699,7 +1717,7 @@ class KeyTargetingTest(fixtures.TablesTest): stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns( a, b ) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() in_(a, row._mapping) in_(b, row._mapping) @@ -1707,13 +1725,13 @@ class KeyTargetingTest(fixtures.TablesTest): in_(stmt.selected_columns.keyed2_a, row._mapping) in_(stmt.selected_columns.keyed2_b, row._mapping) - def test_columnclause_schema_column_five(self): + def test_columnclause_schema_column_five(self, connection): # this is also addressed by [ticket:2932] stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns( keyed2_a=CHAR, keyed2_b=CHAR ) - row = testing.db.execute(stmt).first() + row = connection.execute(stmt).first() in_(stmt.selected_columns.keyed2_a, row._mapping) in_(stmt.selected_columns.keyed2_b, row._mapping) @@ -1818,15 +1836,17 @@ class PositionalTextTest(fixtures.TablesTest): @classmethod def insert_data(cls): - cls.tables.text1.insert().execute( - [dict(a="a1", b="b1", c="c1", d="d1")] - ) + with testing.db.connect() as conn: + conn.execute( + cls.tables.text1.insert(), + [dict(a="a1", b="b1", c="c1", d="d1")], + ) - def test_via_column(self): + def test_via_column(self, connection): c1, c2, c3, c4 = column("q"), column("p"), column("r"), column("d") stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c2], "b1") @@ -1838,23 +1858,23 @@ class PositionalTextTest(fixtures.TablesTest): eq_(row._mapping["r"], "c1") eq_(row._mapping["d"], "d1") - def test_fewer_cols_than_sql_positional(self): + def 
test_fewer_cols_than_sql_positional(self, connection): c1, c2 = column("q"), column("p") stmt = text("select a, b, c, d from text1").columns(c1, c2) # no warning as this can be similar for non-positional - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "a1") eq_(row._mapping["c"], "c1") - def test_fewer_cols_than_sql_non_positional(self): + def test_fewer_cols_than_sql_non_positional(self, connection): c1, c2 = column("a"), column("p") stmt = text("select a, b, c, d from text1").columns(c2, c1, d=CHAR) # no warning as this can be similar for non-positional - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() # c1 name matches, locates @@ -1868,7 +1888,7 @@ class PositionalTextTest(fixtures.TablesTest): lambda: row._mapping[c2], ) - def test_more_cols_than_sql_positional(self): + def test_more_cols_than_sql_positional(self, connection): c1, c2, c3, c4 = column("q"), column("p"), column("r"), column("d") stmt = text("select a, b from text1").columns(c1, c2, c3, c4) @@ -1876,7 +1896,7 @@ class PositionalTextTest(fixtures.TablesTest): r"Number of columns in textual SQL \(4\) is " r"smaller than number of columns requested \(2\)" ): - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c2], "b1") @@ -1887,14 +1907,14 @@ class PositionalTextTest(fixtures.TablesTest): lambda: row._mapping[c3], ) - def test_more_cols_than_sql_nonpositional(self): + def test_more_cols_than_sql_nonpositional(self, connection): c1, c2, c3, c4 = column("b"), column("a"), column("r"), column("d") stmt = TextualSelect( text("select a, b from text1"), [c1, c2, c3, c4], positional=False ) # no warning for non-positional - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "b1") @@ -1906,7 +1926,7 @@ class PositionalTextTest(fixtures.TablesTest): lambda: row._mapping[c3], ) - def test_more_cols_than_sql_nonpositional_labeled_cols(self): + def test_more_cols_than_sql_nonpositional_labeled_cols(self, connection): text1 = self.tables.text1 c1, c2, c3, c4 = text1.c.b, text1.c.a, column("r"), column("d") @@ -1919,7 +1939,7 @@ class PositionalTextTest(fixtures.TablesTest): ) # no warning for non-positional - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "b1") @@ -1931,7 +1951,7 @@ class PositionalTextTest(fixtures.TablesTest): lambda: row._mapping[c3], ) - def test_dupe_col_obj(self): + def test_dupe_col_obj(self, connection): c1, c2, c3 = column("q"), column("p"), column("r") stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c2) @@ -1939,11 +1959,11 @@ class PositionalTextTest(fixtures.TablesTest): exc.InvalidRequestError, "Duplicate column expression requested in " "textual SQL: <.*.ColumnClause.*; p>", - testing.db.execute, + connection.execute, stmt, ) - def test_anon_aliased_unique(self): + def test_anon_aliased_unique(self, connection): text1 = self.tables.text1 c1 = text1.c.a.label(None) @@ -1952,7 +1972,7 @@ class PositionalTextTest(fixtures.TablesTest): c4 = text1.alias().c.d.label(None) stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "a1") @@ -1968,7 +1988,7 @@ class PositionalTextTest(fixtures.TablesTest): lambda: row._mapping[text1.c.b], ) - def test_anon_aliased_overlapping(self): + 
def test_anon_aliased_overlapping(self, connection): text1 = self.tables.text1 c1 = text1.c.a.label(None) @@ -1977,7 +1997,7 @@ class PositionalTextTest(fixtures.TablesTest): c4 = text1.c.a.label(None) stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "a1") @@ -1985,7 +2005,7 @@ class PositionalTextTest(fixtures.TablesTest): eq_(row._mapping[c3], "c1") eq_(row._mapping[c4], "d1") - def test_anon_aliased_name_conflict(self): + def test_anon_aliased_name_conflict(self, connection): text1 = self.tables.text1 c1 = text1.c.a.label("a") @@ -1998,7 +2018,7 @@ class PositionalTextTest(fixtures.TablesTest): stmt = text("select a, b as a, c as a, d as a from text1").columns( c1, c2, c3, c4 ) - result = testing.db.execute(stmt) + result = connection.execute(stmt) row = result.first() eq_(row._mapping[c1], "a1") @@ -2034,10 +2054,11 @@ class AlternateResultProxyTest(fixtures.TablesTest): @classmethod def insert_data(cls): - cls.engine.execute( - cls.tables.test.insert(), - [{"x": i, "y": "t_%d" % i} for i in range(1, 12)], - ) + with cls.engine.connect() as conn: + conn.execute( + cls.tables.test.insert(), + [{"x": i, "y": "t_%d" % i} for i in range(1, 12)], + ) @contextmanager def _proxy_fixture(self, cls): @@ -2059,53 +2080,54 @@ class AlternateResultProxyTest(fixtures.TablesTest): def _test_proxy(self, cls): with self._proxy_fixture(cls): rows = [] - r = self.engine.execute(select([self.table])) - assert isinstance(r.cursor_strategy, cls) - for i in range(5): - rows.append(r.fetchone()) - eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) + with self.engine.connect() as conn: + r = conn.execute(select([self.table])) + assert isinstance(r.cursor_strategy, cls) + for i in range(5): + rows.append(r.fetchone()) + eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) - rows = r.fetchmany(3) - eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)]) + rows = r.fetchmany(3) + eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)]) - rows = r.fetchall() - eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)]) + rows = r.fetchall() + eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)]) - r = self.engine.execute(select([self.table])) - rows = r.fetchmany(None) - eq_(rows[0], (1, "t_1")) - # number of rows here could be one, or the whole thing - assert len(rows) == 1 or len(rows) == 11 + r = conn.execute(select([self.table])) + rows = r.fetchmany(None) + eq_(rows[0], (1, "t_1")) + # number of rows here could be one, or the whole thing + assert len(rows) == 1 or len(rows) == 11 - r = self.engine.execute(select([self.table]).limit(1)) - r.fetchone() - eq_(r.fetchone(), None) + r = conn.execute(select([self.table]).limit(1)) + r.fetchone() + eq_(r.fetchone(), None) - r = self.engine.execute(select([self.table]).limit(5)) - rows = r.fetchmany(6) - eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) + r = conn.execute(select([self.table]).limit(5)) + rows = r.fetchmany(6) + eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) - # result keeps going just fine with blank results... - eq_(r.fetchmany(2), []) + # result keeps going just fine with blank results... 
+ eq_(r.fetchmany(2), []) - eq_(r.fetchmany(2), []) + eq_(r.fetchmany(2), []) - eq_(r.fetchall(), []) + eq_(r.fetchall(), []) - eq_(r.fetchone(), None) + eq_(r.fetchone(), None) - # until we close - r.close() + # until we close + r.close() - self._assert_result_closed(r) + self._assert_result_closed(r) - r = self.engine.execute(select([self.table]).limit(5)) - eq_(r.first(), (1, "t_1")) - self._assert_result_closed(r) + r = conn.execute(select([self.table]).limit(5)) + eq_(r.first(), (1, "t_1")) + self._assert_result_closed(r) - r = self.engine.execute(select([self.table]).limit(5)) - eq_(r.scalar(), 1) - self._assert_result_closed(r) + r = conn.execute(select([self.table]).limit(5)) + eq_(r.scalar(), 1) + self._assert_result_closed(r) def _assert_result_closed(self, r): assert_raises_message( diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py index d81ad7186..5f655db6d 100644 --- a/test/sql/test_returning.py +++ b/test/sql/test_returning.py @@ -53,16 +53,17 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): Column("full", Boolean), Column("goofy", GoofyType(50)), ) - table.create(checkfirst=True) + with testing.db.connect() as conn: + table.create(conn, checkfirst=True) def teardown(self): - table.drop() + with testing.db.connect() as conn: + table.drop(conn) - def test_column_targeting(self): - result = ( - table.insert() - .returning(table.c.id, table.c.full) - .execute({"persons": 1, "full": False}) + def test_column_targeting(self, connection): + result = connection.execute( + table.insert().returning(table.c.id, table.c.full), + {"persons": 1, "full": False}, ) row = result.first()._mapping @@ -70,11 +71,10 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): assert row[table.c.full] == row["full"] assert row["full"] is False - result = ( + result = connection.execute( table.insert() .values(persons=5, full=True, goofy="somegoofy") .returning(table.c.persons, table.c.full, table.c.goofy) - .execute() ) row = result.first()._mapping assert row[table.c.persons] == row["persons"] == 5 @@ -84,12 +84,11 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): eq_(row["goofy"], "FOOsomegoofyBAR") @testing.fails_on("firebird", "fb can't handle returning x AS y") - def test_labeling(self): - result = ( + def test_labeling(self, connection): + result = connection.execute( table.insert() .values(persons=6) .returning(table.c.persons.label("lala")) - .execute() ) row = result.first()._mapping assert row["lala"] == 6 @@ -97,53 +96,48 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): @testing.fails_on( "firebird", "fb/kintersbasdb can't handle the bind params" ) - def test_anon_expressions(self): - result = ( + def test_anon_expressions(self, connection): + result = connection.execute( table.insert() .values(goofy="someOTHERgoofy") .returning(func.lower(table.c.goofy, type_=GoofyType)) - .execute() ) row = result.first() eq_(row[0], "foosomeothergoofyBAR") - result = ( - table.insert() - .values(persons=12) - .returning(table.c.persons + 18) - .execute() + result = connection.execute( + table.insert().values(persons=12).returning(table.c.persons + 18) ) row = result.first() eq_(row[0], 30) - def test_update_returning(self): - table.insert().execute( - [{"persons": 5, "full": False}, {"persons": 3, "full": False}] + def test_update_returning(self, connection): + connection.execute( + table.insert(), + [{"persons": 5, "full": False}, {"persons": 3, "full": False}], ) - result = ( - table.update(table.c.persons > 4, 
dict(full=True)) - .returning(table.c.id) - .execute() + result = connection.execute( + table.update(table.c.persons > 4, dict(full=True)).returning( + table.c.id + ) ) eq_(result.fetchall(), [(1,)]) - result2 = ( - select([table.c.id, table.c.full]).order_by(table.c.id).execute() + result2 = connection.execute( + select([table.c.id, table.c.full]).order_by(table.c.id) ) eq_(result2.fetchall(), [(1, True), (2, False)]) - def test_insert_returning(self): - result = ( - table.insert() - .returning(table.c.id) - .execute({"persons": 1, "full": False}) + def test_insert_returning(self, connection): + result = connection.execute( + table.insert().returning(table.c.id), {"persons": 1, "full": False} ) eq_(result.fetchall(), [(1,)]) @testing.requires.multivalues_inserts - def test_multirow_returning(self): + def test_multirow_returning(self, connection): ins = ( table.insert() .returning(table.c.id, table.c.persons) @@ -155,11 +149,11 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): ] ) ) - result = testing.db.execute(ins) + result = connection.execute(ins) eq_(result.fetchall(), [(1, 1), (2, 2), (3, 3)]) - def test_no_ipk_on_returning(self): - result = testing.db.execute( + def test_no_ipk_on_returning(self, connection): + result = connection.execute( table.insert().returning(table.c.id), {"persons": 1, "full": False} ) assert_raises_message( @@ -183,18 +177,19 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults): ) eq_([dict(row._mapping) for row in result4], [{"persons": 10}]) - def test_delete_returning(self): - table.insert().execute( - [{"persons": 5, "full": False}, {"persons": 3, "full": False}] + def test_delete_returning(self, connection): + connection.execute( + table.insert(), + [{"persons": 5, "full": False}, {"persons": 3, "full": False}], ) - result = ( - table.delete(table.c.persons > 4).returning(table.c.id).execute() + result = connection.execute( + table.delete(table.c.persons > 4).returning(table.c.id) ) eq_(result.fetchall(), [(1,)]) - result2 = ( - select([table.c.id, table.c.full]).order_by(table.c.id).execute() + result2 = connection.execute( + select([table.c.id, table.c.full]).order_by(table.c.id) ) eq_(result2.fetchall(), [(2, False)]) @@ -204,7 +199,7 @@ class CompositeStatementTest(fixtures.TestBase): __backend__ = True @testing.provide_metadata - def test_select_doesnt_pollute_result(self): + def test_select_doesnt_pollute_result(self, connection): class MyType(TypeDecorator): impl = Integer @@ -215,18 +210,17 @@ class CompositeStatementTest(fixtures.TestBase): t2 = Table("t2", self.metadata, Column("x", Integer)) - self.metadata.create_all(testing.db) - with testing.db.connect() as conn: - conn.execute(t1.insert().values(x=5)) + self.metadata.create_all(connection) + connection.execute(t1.insert().values(x=5)) - stmt = ( - t2.insert() - .values(x=select([t1.c.x]).scalar_subquery()) - .returning(t2.c.x) - ) + stmt = ( + t2.insert() + .values(x=select([t1.c.x]).scalar_subquery()) + .returning(t2.c.x) + ) - result = conn.execute(stmt) - eq_(result.scalar(), 5) + result = connection.execute(stmt) + eq_(result.scalar(), 5) class SequenceReturningTest(fixtures.TestBase): @@ -243,15 +237,19 @@ class SequenceReturningTest(fixtures.TestBase): Column("id", Integer, seq, primary_key=True), Column("data", String(50)), ) - table.create(checkfirst=True) + with testing.db.connect() as conn: + table.create(conn, checkfirst=True) def teardown(self): - table.drop() + with testing.db.connect() as conn: + table.drop(conn) - def test_insert(self): - r = 
table.insert().values(data="hi").returning(table.c.id).execute() + def test_insert(self, connection): + r = connection.execute( + table.insert().values(data="hi").returning(table.c.id) + ) assert r.first() == (1,) - assert seq.execute() == 2 + assert connection.execute(seq) == 2 class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults): @@ -277,21 +275,23 @@ class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults): ), Column("data", String(20)), ) - table.create(checkfirst=True) + with testing.db.connect() as conn: + table.create(conn, checkfirst=True) def teardown(self): - table.drop() + with testing.db.connect() as conn: + table.drop(conn) @testing.exclude("firebird", "<", (2, 0), "2.0+ feature") @testing.exclude("postgresql", "<", (8, 2), "8.2+ feature") - def test_insert(self): - result = ( - table.insert().returning(table.c.foo_id).execute(data="somedata") + def test_insert(self, connection): + result = connection.execute( + table.insert().returning(table.c.foo_id), data="somedata" ) row = result.first()._mapping assert row[table.c.foo_id] == row["id"] == 1 - result = table.select().execute().first()._mapping + result = connection.execute(table.select()).first()._mapping assert row[table.c.foo_id] == row["id"] == 1 @@ -325,9 +325,9 @@ class ReturnDefaultsTest(fixtures.TablesTest): Column("upddef", Integer, onupdate=IncDefault()), ) - def test_chained_insert_pk(self): + def test_chained_insert_pk(self, connection): t1 = self.tables.t1 - result = testing.db.execute( + result = connection.execute( t1.insert().values(upddef=1).return_defaults(t1.c.insdef) ) eq_( @@ -338,9 +338,9 @@ class ReturnDefaultsTest(fixtures.TablesTest): [1, 0], ) - def test_arg_insert_pk(self): + def test_arg_insert_pk(self, connection): t1 = self.tables.t1 - result = testing.db.execute( + result = connection.execute( t1.insert(return_defaults=[t1.c.insdef]).values(upddef=1) ) eq_( @@ -351,32 +351,32 @@ class ReturnDefaultsTest(fixtures.TablesTest): [1, 0], ) - def test_chained_update_pk(self): + def test_chained_update_pk(self, connection): t1 = self.tables.t1 - testing.db.execute(t1.insert().values(upddef=1)) - result = testing.db.execute( + connection.execute(t1.insert().values(upddef=1)) + result = connection.execute( t1.update().values(data="d1").return_defaults(t1.c.upddef) ) eq_( [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1] ) - def test_arg_update_pk(self): + def test_arg_update_pk(self, connection): t1 = self.tables.t1 - testing.db.execute(t1.insert().values(upddef=1)) - result = testing.db.execute( + connection.execute(t1.insert().values(upddef=1)) + result = connection.execute( t1.update(return_defaults=[t1.c.upddef]).values(data="d1") ) eq_( [result.returned_defaults._mapping[k] for k in (t1.c.upddef,)], [1] ) - def test_insert_non_default(self): + def test_insert_non_default(self, connection): """test that a column not marked at all as a default works with this feature.""" t1 = self.tables.t1 - result = testing.db.execute( + result = connection.execute( t1.insert().values(upddef=1).return_defaults(t1.c.data) ) eq_( @@ -387,13 +387,13 @@ class ReturnDefaultsTest(fixtures.TablesTest): [1, None], ) - def test_update_non_default(self): + def test_update_non_default(self, connection): """test that a column not marked at all as a default works with this feature.""" t1 = self.tables.t1 - testing.db.execute(t1.insert().values(upddef=1)) - result = testing.db.execute( + connection.execute(t1.insert().values(upddef=1)) + result = connection.execute( 
t1.update().values(upddef=2).return_defaults(t1.c.data) ) eq_( @@ -401,9 +401,9 @@ class ReturnDefaultsTest(fixtures.TablesTest): [None], ) - def test_insert_non_default_plus_default(self): + def test_insert_non_default_plus_default(self, connection): t1 = self.tables.t1 - result = testing.db.execute( + result = connection.execute( t1.insert() .values(upddef=1) .return_defaults(t1.c.data, t1.c.insdef) @@ -413,10 +413,10 @@ class ReturnDefaultsTest(fixtures.TablesTest): {"id": 1, "data": None, "insdef": 0}, ) - def test_update_non_default_plus_default(self): + def test_update_non_default_plus_default(self, connection): t1 = self.tables.t1 - testing.db.execute(t1.insert().values(upddef=1)) - result = testing.db.execute( + connection.execute(t1.insert().values(upddef=1)) + result = connection.execute( t1.update() .values(insdef=2) .return_defaults(t1.c.data, t1.c.upddef) @@ -426,9 +426,9 @@ class ReturnDefaultsTest(fixtures.TablesTest): {"data": None, "upddef": 1}, ) - def test_insert_all(self): + def test_insert_all(self, connection): t1 = self.tables.t1 - result = testing.db.execute( + result = connection.execute( t1.insert().values(upddef=1).return_defaults() ) eq_( @@ -436,10 +436,10 @@ class ReturnDefaultsTest(fixtures.TablesTest): {"id": 1, "data": None, "insdef": 0}, ) - def test_update_all(self): + def test_update_all(self, connection): t1 = self.tables.t1 - testing.db.execute(t1.insert().values(upddef=1)) - result = testing.db.execute( + connection.execute(t1.insert().values(upddef=1)) + result = connection.execute( t1.update().values(insdef=2).return_defaults() ) eq_(dict(result.returned_defaults._mapping), {"upddef": 1}) diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 2aef6f2a5..e0c1359b4 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -547,11 +547,13 @@ class UserDefinedRoundTripTest(_UserDefinedTypeFixture, fixtures.TablesTest): ), ) - def test_processing(self): + def test_processing(self, connection): users = self.tables.users self._data_fixture() - result = users.select().order_by(users.c.user_id).execute().fetchall() + result = connection.execute( + users.select().order_by(users.c.user_id) + ).fetchall() for assertstr, assertint, assertint2, row in zip( [ "BIND_INjackBIND_OUT", @@ -569,7 +571,7 @@ class UserDefinedRoundTripTest(_UserDefinedTypeFixture, fixtures.TablesTest): for col in row[3], row[4]: assert isinstance(col, util.text_type) - def test_plain_in(self): + def test_plain_in(self, connection): users = self.tables.users self._data_fixture() @@ -578,10 +580,10 @@ class UserDefinedRoundTripTest(_UserDefinedTypeFixture, fixtures.TablesTest): .where(users.c.goofy8.in_([15, 9])) .order_by(users.c.user_id) ) - result = testing.db.execute(stmt, {"goofy": [15, 9]}) + result = connection.execute(stmt, {"goofy": [15, 9]}) eq_(result.fetchall(), [(3, 1500), (4, 900)]) - def test_expanding_in(self): + def test_expanding_in(self, connection): users = self.tables.users self._data_fixture() @@ -590,7 +592,7 @@ class UserDefinedRoundTripTest(_UserDefinedTypeFixture, fixtures.TablesTest): .where(users.c.goofy8.in_(bindparam("goofy", expanding=True))) .order_by(users.c.user_id) ) - result = testing.db.execute(stmt, {"goofy": [15, 9]}) + result = connection.execute(stmt, {"goofy": [15, 9]}) eq_(result.fetchall(), [(3, 1500), (4, 900)]) @@ -779,30 +781,30 @@ class TypeCoerceCastTest(fixtures.TablesTest): Table("t", metadata, Column("data", String(50))) - def test_insert_round_trip_cast(self): - self._test_insert_round_trip(cast) + def 
test_insert_round_trip_cast(self, connection): + self._test_insert_round_trip(cast, connection) - def test_insert_round_trip_type_coerce(self): - self._test_insert_round_trip(type_coerce) + def test_insert_round_trip_type_coerce(self, connection): + self._test_insert_round_trip(type_coerce, connection) - def _test_insert_round_trip(self, coerce_fn): + def _test_insert_round_trip(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) eq_( - select([coerce_fn(t.c.data, MyType)]).execute().fetchall(), + conn.execute(select([coerce_fn(t.c.data, MyType)])).fetchall(), [("BIND_INd1BIND_OUT",)], ) - def test_coerce_from_nulltype_cast(self): - self._test_coerce_from_nulltype(cast) + def test_coerce_from_nulltype_cast(self, connection): + self._test_coerce_from_nulltype(cast, connection) - def test_coerce_from_nulltype_type_coerce(self): - self._test_coerce_from_nulltype(type_coerce) + def test_coerce_from_nulltype_type_coerce(self, connection): + self._test_coerce_from_nulltype(type_coerce, connection) - def _test_coerce_from_nulltype(self, coerce_fn): + def _test_coerce_from_nulltype(self, coerce_fn, conn): MyType = self.MyType # test coerce from nulltype - e.g. use an object that @@ -813,144 +815,148 @@ class TypeCoerceCastTest(fixtures.TablesTest): t = self.tables.t - t.insert().values(data=coerce_fn(MyObj(), MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn(MyObj(), MyType))) eq_( - select([coerce_fn(t.c.data, MyType)]).execute().fetchall(), + conn.execute(select([coerce_fn(t.c.data, MyType)])).fetchall(), [("BIND_INTHISISMYOBJBIND_OUT",)], ) - def test_vs_non_coerced_cast(self): - self._test_vs_non_coerced(cast) + def test_vs_non_coerced_cast(self, connection): + self._test_vs_non_coerced(cast, connection) - def test_vs_non_coerced_type_coerce(self): - self._test_vs_non_coerced(type_coerce) + def test_vs_non_coerced_type_coerce(self, connection): + self._test_vs_non_coerced(type_coerce, connection) - def _test_vs_non_coerced(self, coerce_fn): + def _test_vs_non_coerced(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) eq_( - select([t.c.data, coerce_fn(t.c.data, MyType)]) - .execute() - .fetchall(), + conn.execute( + select([t.c.data, coerce_fn(t.c.data, MyType)]) + ).fetchall(), [("BIND_INd1", "BIND_INd1BIND_OUT")], ) - def test_vs_non_coerced_alias_cast(self): - self._test_vs_non_coerced_alias(cast) + def test_vs_non_coerced_alias_cast(self, connection): + self._test_vs_non_coerced_alias(cast, connection) - def test_vs_non_coerced_alias_type_coerce(self): - self._test_vs_non_coerced_alias(type_coerce) + def test_vs_non_coerced_alias_type_coerce(self, connection): + self._test_vs_non_coerced_alias(type_coerce, connection) - def _test_vs_non_coerced_alias(self, coerce_fn): + def _test_vs_non_coerced_alias(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) eq_( - select([t.c.data.label("x"), coerce_fn(t.c.data, MyType)]) - .alias() - .select() - .execute() - .fetchall(), + conn.execute( + select([t.c.data.label("x"), coerce_fn(t.c.data, MyType)]) + .alias() + .select() + ).fetchall(), [("BIND_INd1", "BIND_INd1BIND_OUT")], ) - def test_vs_non_coerced_where_cast(self): - 
self._test_vs_non_coerced_where(cast) + def test_vs_non_coerced_where_cast(self, connection): + self._test_vs_non_coerced_where(cast, connection) - def test_vs_non_coerced_where_type_coerce(self): - self._test_vs_non_coerced_where(type_coerce) + def test_vs_non_coerced_where_type_coerce(self, connection): + self._test_vs_non_coerced_where(type_coerce, connection) - def _test_vs_non_coerced_where(self, coerce_fn): + def _test_vs_non_coerced_where(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) # coerce on left side eq_( - select([t.c.data, coerce_fn(t.c.data, MyType)]) - .where(coerce_fn(t.c.data, MyType) == "d1") - .execute() - .fetchall(), + conn.execute( + select([t.c.data, coerce_fn(t.c.data, MyType)]).where( + coerce_fn(t.c.data, MyType) == "d1" + ) + ).fetchall(), [("BIND_INd1", "BIND_INd1BIND_OUT")], ) # coerce on right side eq_( - select([t.c.data, coerce_fn(t.c.data, MyType)]) - .where(t.c.data == coerce_fn("d1", MyType)) - .execute() - .fetchall(), + conn.execute( + select([t.c.data, coerce_fn(t.c.data, MyType)]).where( + t.c.data == coerce_fn("d1", MyType) + ) + ).fetchall(), [("BIND_INd1", "BIND_INd1BIND_OUT")], ) - def test_coerce_none_cast(self): - self._test_coerce_none(cast) + def test_coerce_none_cast(self, connection): + self._test_coerce_none(cast, connection) - def test_coerce_none_type_coerce(self): - self._test_coerce_none(type_coerce) + def test_coerce_none_type_coerce(self, connection): + self._test_coerce_none(type_coerce, connection) - def _test_coerce_none(self, coerce_fn): + def _test_coerce_none(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) eq_( - select([t.c.data, coerce_fn(t.c.data, MyType)]) - .where(t.c.data == coerce_fn(None, MyType)) - .execute() - .fetchall(), + conn.execute( + select([t.c.data, coerce_fn(t.c.data, MyType)]).where( + t.c.data == coerce_fn(None, MyType) + ) + ).fetchall(), [], ) eq_( - select([t.c.data, coerce_fn(t.c.data, MyType)]) - .where(coerce_fn(t.c.data, MyType) == None) - .execute() # noqa - .fetchall(), + conn.execute( + select([t.c.data, coerce_fn(t.c.data, MyType)]).where( + coerce_fn(t.c.data, MyType) == None + ) + ).fetchall(), # noqa [], ) - def test_resolve_clause_element_cast(self): - self._test_resolve_clause_element(cast) + def test_resolve_clause_element_cast(self, connection): + self._test_resolve_clause_element(cast, connection) - def test_resolve_clause_element_type_coerce(self): - self._test_resolve_clause_element(type_coerce) + def test_resolve_clause_element_type_coerce(self, connection): + self._test_resolve_clause_element(type_coerce, connection) - def _test_resolve_clause_element(self, coerce_fn): + def _test_resolve_clause_element(self, coerce_fn, conn): MyType = self.MyType t = self.tables.t - t.insert().values(data=coerce_fn("d1", MyType)).execute() + conn.execute(t.insert().values(data=coerce_fn("d1", MyType))) class MyFoob(object): def __clause_element__(self): return t.c.data eq_( - testing.db.execute( + conn.execute( select([t.c.data, coerce_fn(MyFoob(), MyType)]) ).fetchall(), [("BIND_INd1", "BIND_INd1BIND_OUT")], ) - def test_cast_replace_col_w_bind(self): - self._test_replace_col_w_bind(cast) + def test_cast_replace_col_w_bind(self, connection): + self._test_replace_col_w_bind(cast, connection) - def 
-    def test_type_coerce_replace_col_w_bind(self):
-        self._test_replace_col_w_bind(type_coerce)
+    def test_type_coerce_replace_col_w_bind(self, connection):
+        self._test_replace_col_w_bind(type_coerce, connection)

-    def _test_replace_col_w_bind(self, coerce_fn):
+    def _test_replace_col_w_bind(self, coerce_fn, conn):
         MyType = self.MyType
         t = self.tables.t

-        t.insert().values(data=coerce_fn("d1", MyType)).execute()
+        conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))

         stmt = select([t.c.data, coerce_fn(t.c.data, MyType)])
@@ -967,30 +973,30 @@ class TypeCoerceCastTest(fixtures.TablesTest):

         # original statement
         eq_(
-            testing.db.execute(stmt).fetchall(),
+            conn.execute(stmt).fetchall(),
             [("BIND_INd1", "BIND_INd1BIND_OUT")],
         )

         # replaced with binds; CAST can't affect the bound parameter
         # on the way in here
         eq_(
-            testing.db.execute(new_stmt).fetchall(),
+            conn.execute(new_stmt).fetchall(),
             [("x", "BIND_INxBIND_OUT")]
             if coerce_fn is type_coerce
             else [("x", "xBIND_OUT")],
         )

-    def test_cast_bind(self):
-        self._test_bind(cast)
+    def test_cast_bind(self, connection):
+        self._test_bind(cast, connection)

-    def test_type_bind(self):
-        self._test_bind(type_coerce)
+    def test_type_bind(self, connection):
+        self._test_bind(type_coerce, connection)

-    def _test_bind(self, coerce_fn):
+    def _test_bind(self, coerce_fn, conn):
         MyType = self.MyType
         t = self.tables.t

-        t.insert().values(data=coerce_fn("d1", MyType)).execute()
+        conn.execute(t.insert().values(data=coerce_fn("d1", MyType)))

         stmt = select(
             [
@@ -1002,24 +1008,24 @@ class TypeCoerceCastTest(fixtures.TablesTest):
         )

         eq_(
-            testing.db.execute(stmt).fetchall(),
+            conn.execute(stmt).fetchall(),
             [("x", "BIND_INxBIND_OUT")]
             if coerce_fn is type_coerce
             else [("x", "xBIND_OUT")],
         )

-    def test_cast_existing_typed(self):
+    def test_cast_existing_typed(self, connection):
         MyType = self.MyType
         coerce_fn = cast

         # when cast() is given an already typed value,
         # the type does not take effect on the value itself.
         eq_(
-            testing.db.scalar(select([coerce_fn(literal("d1"), MyType)])),
+            connection.scalar(select([coerce_fn(literal("d1"), MyType)])),
             "d1BIND_OUT",
         )

-    def test_type_coerce_existing_typed(self):
+    def test_type_coerce_existing_typed(self, connection):
         MyType = self.MyType
         coerce_fn = type_coerce
         t = self.tables.t
@@ -1027,10 +1033,14 @@ class TypeCoerceCastTest(fixtures.TablesTest):

         # type_coerce does upgrade the given expression to the
         # given type.
-        t.insert().values(data=coerce_fn(literal("d1"), MyType)).execute()
+        connection.execute(
+            t.insert().values(data=coerce_fn(literal("d1"), MyType))
+        )

         eq_(
-            select([coerce_fn(t.c.data, MyType)]).execute().fetchall(),
+            connection.execute(
+                select([coerce_fn(t.c.data, MyType)])
+            ).fetchall(),
             [("BIND_INd1BIND_OUT",)],
         )
@@ -1449,74 +1459,80 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
             "foo",
         )

-    def test_validators_not_in_like_roundtrip(self):
+    def test_validators_not_in_like_roundtrip(self, connection):
         enum_table = self.tables["non_native_enum_table"]

-        enum_table.insert().execute(
+        connection.execute(
+            enum_table.insert(),
             [
                 {"id": 1, "someenum": "two"},
                 {"id": 2, "someenum": "two"},
                 {"id": 3, "someenum": "one"},
-            ]
+            ],
         )

         eq_(
-            enum_table.select()
-            .where(enum_table.c.someenum.like("%wo%"))
-            .order_by(enum_table.c.id)
-            .execute()
-            .fetchall(),
+            connection.execute(
+                enum_table.select()
+                .where(enum_table.c.someenum.like("%wo%"))
+                .order_by(enum_table.c.id)
+            ).fetchall(),
             [(1, "two", None), (2, "two", None)],
         )

-    def test_validators_not_in_concatenate_roundtrip(self):
+    def test_validators_not_in_concatenate_roundtrip(self, connection):
         enum_table = self.tables["non_native_enum_table"]

-        enum_table.insert().execute(
+        connection.execute(
+            enum_table.insert(),
             [
                 {"id": 1, "someenum": "two"},
                 {"id": 2, "someenum": "two"},
                 {"id": 3, "someenum": "one"},
-            ]
+            ],
         )

         eq_(
-            select(["foo" + enum_table.c.someenum])
-            .order_by(enum_table.c.id)
-            .execute()
-            .fetchall(),
+            connection.execute(
+                select(["foo" + enum_table.c.someenum]).order_by(
+                    enum_table.c.id
+                )
+            ).fetchall(),
             [("footwo",), ("footwo",), ("fooone",)],
         )

-    def test_round_trip(self):
+    def test_round_trip(self, connection):
         enum_table = self.tables["enum_table"]

-        enum_table.insert().execute(
+        connection.execute(
+            enum_table.insert(),
             [
                 {"id": 1, "someenum": "two"},
                 {"id": 2, "someenum": "two"},
                 {"id": 3, "someenum": "one"},
-            ]
+            ],
         )

         eq_(
-            enum_table.select().order_by(enum_table.c.id).execute().fetchall(),
+            connection.execute(
+                enum_table.select().order_by(enum_table.c.id)
+            ).fetchall(),
             [(1, "two"), (2, "two"), (3, "one")],
         )

-    def test_null_round_trip(self):
+    def test_null_round_trip(self, connection):
         enum_table = self.tables.enum_table
         non_native_enum_table = self.tables.non_native_enum_table

-        with testing.db.connect() as conn:
-            conn.execute(enum_table.insert(), {"id": 1, "someenum": None})
-            eq_(conn.scalar(select([enum_table.c.someenum])), None)
+        connection.execute(enum_table.insert(), {"id": 1, "someenum": None})
+        eq_(connection.scalar(select([enum_table.c.someenum])), None)

-        with testing.db.connect() as conn:
-            conn.execute(
-                non_native_enum_table.insert(), {"id": 1, "someenum": None}
-            )
-            eq_(conn.scalar(select([non_native_enum_table.c.someenum])), None)
+        connection.execute(
+            non_native_enum_table.insert(), {"id": 1, "someenum": None}
+        )
+        eq_(
+            connection.scalar(select([non_native_enum_table.c.someenum])), None
+        )

     @testing.requires.enforces_check_constraints
     def test_check_constraint(self, connection):
@@ -1619,24 +1635,27 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
             select([self.tables.non_native_enum_table.c.someotherenum]),
         )

-    def test_non_native_round_trip(self):
+    def test_non_native_round_trip(self, connection):
         non_native_enum_table = self.tables["non_native_enum_table"]

-        non_native_enum_table.insert().execute(
+        connection.execute(
+            non_native_enum_table.insert(),
             [
                 {"id": 1, "someenum": "two"},
                 {"id": 2, "someenum": "two"},
"someenum": "one"}, - ] + ], ) eq_( - select( - [non_native_enum_table.c.id, non_native_enum_table.c.someenum] - ) - .order_by(non_native_enum_table.c.id) - .execute() - .fetchall(), + connection.execute( + select( + [ + non_native_enum_table.c.id, + non_native_enum_table.c.someenum, + ] + ).order_by(non_native_enum_table.c.id) + ).fetchall(), [(1, "two"), (2, "two"), (3, "one")], ) @@ -1679,10 +1698,11 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): typ = Enum(self.SomeEnum, sort_key_function=None) is_(typ.sort_key_function, None) - def test_pep435_enum_round_trip(self): + def test_pep435_enum_round_trip(self, connection): stdlib_enum_table = self.tables["stdlib_enum_table"] - stdlib_enum_table.insert().execute( + connection.execute( + stdlib_enum_table.insert(), [ {"id": 1, "someenum": self.SomeEnum.two}, {"id": 2, "someenum": self.SomeEnum.two}, @@ -1691,14 +1711,13 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): {"id": 5, "someenum": self.SomeEnum.four}, {"id": 6, "someenum": "three"}, {"id": 7, "someenum": "four"}, - ] + ], ) eq_( - stdlib_enum_table.select() - .order_by(stdlib_enum_table.c.id) - .execute() - .fetchall(), + connection.execute( + stdlib_enum_table.select().order_by(stdlib_enum_table.c.id) + ).fetchall(), [ (1, self.SomeEnum.two), (2, self.SomeEnum.two), @@ -1710,22 +1729,24 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): ], ) - def test_pep435_enum_values_callable_round_trip(self): + def test_pep435_enum_values_callable_round_trip(self, connection): stdlib_enum_table_custom_values = self.tables["stdlib_enum_table2"] - stdlib_enum_table_custom_values.insert().execute( + connection.execute( + stdlib_enum_table_custom_values.insert(), [ {"id": 1, "someotherenum": self.SomeOtherEnum.AMember}, {"id": 2, "someotherenum": self.SomeOtherEnum.BMember}, {"id": 3, "someotherenum": self.SomeOtherEnum.AMember}, - ] + ], ) eq_( - stdlib_enum_table_custom_values.select() - .order_by(stdlib_enum_table_custom_values.c.id) - .execute() - .fetchall(), + connection.execute( + stdlib_enum_table_custom_values.select().order_by( + stdlib_enum_table_custom_values.c.id + ) + ).fetchall(), [ (1, self.SomeOtherEnum.AMember), (2, self.SomeOtherEnum.BMember), @@ -1733,15 +1754,16 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): ], ) - def test_pep435_enum_expanding_in(self): + def test_pep435_enum_expanding_in(self, connection): stdlib_enum_table_custom_values = self.tables["stdlib_enum_table2"] - stdlib_enum_table_custom_values.insert().execute( + connection.execute( + stdlib_enum_table_custom_values.insert(), [ {"id": 1, "someotherenum": self.SomeOtherEnum.one}, {"id": 2, "someotherenum": self.SomeOtherEnum.two}, {"id": 3, "someotherenum": self.SomeOtherEnum.three}, - ] + ], ) stmt = ( @@ -1754,7 +1776,7 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): .order_by(stdlib_enum_table_custom_values.c.id) ) eq_( - testing.db.execute( + connection.execute( stmt, {"member": [self.SomeOtherEnum.one, self.SomeOtherEnum.three]}, ).fetchall(), @@ -1851,10 +1873,11 @@ class EnumTest(AssertsCompiledSQL, fixtures.TablesTest): dialect="default", ) - def test_lookup_failure(self): + def test_lookup_failure(self, connection): assert_raises( exc.StatementError, - self.tables["non_native_enum_table"].insert().execute, + connection.execute, + self.tables["non_native_enum_table"].insert(), {"id": 4, "someotherenum": "four"}, ) @@ -1960,21 +1983,23 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): @engines.close_first def teardown(self): - 
-        binary_table.delete().execute()
+        with testing.db.connect() as conn:
+            conn.execute(binary_table.delete())

     @classmethod
     def teardown_class(cls):
         metadata.drop_all()

     @testing.requires.non_broken_binary
-    def test_round_trip(self):
+    def test_round_trip(self, connection):
         testobj1 = pickleable.Foo("im foo 1")
         testobj2 = pickleable.Foo("im foo 2")
         testobj3 = pickleable.Foo("im foo 3")

         stream1 = self.load_stream("binary_data_one.dat")
         stream2 = self.load_stream("binary_data_two.dat")

-        binary_table.insert().execute(
+        connection.execute(
+            binary_table.insert(),
             primary_id=1,
             misc="binary_data_one.dat",
             data=stream1,
@@ -1982,14 +2007,16 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
             pickled=testobj1,
             mypickle=testobj3,
         )
-        binary_table.insert().execute(
+        connection.execute(
+            binary_table.insert(),
             primary_id=2,
             misc="binary_data_two.dat",
             data=stream2,
             data_slice=stream2[0:99],
             pickled=testobj2,
         )
-        binary_table.insert().execute(
+        connection.execute(
+            binary_table.insert(),
             primary_id=3,
             misc="binary_data_two.dat",
             data=None,
@@ -2011,7 +2038,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
                 }
             ),
         ):
-            result = stmt.execute().fetchall()
+            result = connection.execute(stmt).fetchall()
             eq_(stream1, result[0]._mapping["data"])
             eq_(stream1[0:100], result[0]._mapping["data_slice"])
             eq_(stream2, result[1]._mapping["data"])
@@ -2023,28 +2050,29 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
         )

     @testing.requires.binary_comparisons
-    def test_comparison(self):
+    def test_comparison(self, connection):
         """test that type coercion occurs on comparison for binary"""

         expr = binary_table.c.data == "foo"
         assert isinstance(expr.right.type, LargeBinary)

         data = os.urandom(32)
-        binary_table.insert().execute(data=data)
+        connection.execute(binary_table.insert(), data=data)
         eq_(
-            select([func.count("*")])
-            .select_from(binary_table)
-            .where(binary_table.c.data == data)
-            .scalar(),
+            connection.scalar(
+                select([func.count("*")])
+                .select_from(binary_table)
+                .where(binary_table.c.data == data)
+            ),
             1,
         )

     @testing.requires.binary_literals
-    def test_literal_roundtrip(self):
+    def test_literal_roundtrip(self, connection):
         compiled = select([cast(literal(util.b("foo")), LargeBinary)]).compile(
             dialect=testing.db.dialect, compile_kwargs={"literal_binds": True}
         )
-        result = testing.db.execute(compiled)
+        result = connection.execute(compiled)
         eq_(result.scalar(), util.b("foo"))

     def test_bind_processor_no_dbapi(self):
@@ -2303,15 +2331,17 @@ class ExpressionTest(

         meta.create_all()

-        test_table.insert().execute(
-            {
-                "id": 1,
-                "data": "somedata",
-                "atimestamp": datetime.date(2007, 10, 15),
-                "avalue": 25,
-                "bvalue": "foo",
-            }
-        )
+        with testing.db.connect() as conn:
+            conn.execute(
+                test_table.insert(),
+                {
+                    "id": 1,
+                    "data": "somedata",
+                    "atimestamp": datetime.date(2007, 10, 15),
+                    "avalue": 25,
+                    "bvalue": "foo",
+                },
+            )

     @classmethod
     def teardown_class(cls):
@@ -2336,13 +2366,13 @@ class ExpressionTest(
             ],
         )

-    def test_bind_adapt(self):
+    def test_bind_adapt(self, connection):
         # test an untyped bind gets the left side's type
         expr = test_table.c.atimestamp == bindparam("thedate")
         eq_(expr.right.type._type_affinity, Date)

         eq_(
-            testing.db.execute(
+            connection.execute(
                 select(
                     [
                         test_table.c.id,
@@ -2359,7 +2389,7 @@ class ExpressionTest(
         eq_(expr.right.type._type_affinity, MyCustomType)

         eq_(
-            testing.db.execute(
+            connection.execute(
                 test_table.select().where(expr), {"somevalue": 25}
             ).fetchall(),
             [
@@ -2377,7 +2407,7 @@ class ExpressionTest(
         eq_(expr.right.type._type_affinity, String)

         eq_(
-            testing.db.execute(
+            connection.execute(
                 test_table.select().where(expr), {"somevalue": "foo"}
             ).fetchall(),
             [
@@ -2455,14 +2485,14 @@ class ExpressionTest(
     def test_actual_literal_adapters(self, data, expected):
         is_(literal(data).type.__class__, expected)

-    def test_typedec_operator_adapt(self):
+    def test_typedec_operator_adapt(self, connection):
         expr = test_table.c.bvalue + "hi"

         assert expr.type.__class__ is MyTypeDec
         assert expr.right.type.__class__ is MyTypeDec

         eq_(
-            testing.db.execute(select([expr.label("foo")])).scalar(),
+            connection.execute(select([expr.label("foo")])).scalar(),
             "BIND_INfooBIND_INhiBIND_OUT",
         )
@@ -2503,7 +2533,7 @@ class ExpressionTest(
             dialect=default.DefaultDialect(supports_native_boolean=True),
         )

-    def test_typedec_righthand_coercion(self):
+    def test_typedec_righthand_coercion(self, connection):
         class MyTypeDec(types.TypeDecorator):
             impl = String
@@ -2524,7 +2554,7 @@ class ExpressionTest(
         is_(expr.type.__class__, MyTypeDec)

         eq_(
-            testing.db.execute(select([expr.label("foo")])).scalar(),
+            connection.execute(select([expr.label("foo")])).scalar(),
             "BIND_INfooBIND_IN6BIND_OUT",
         )
@@ -2672,12 +2702,12 @@ class ExpressionTest(
         expr = bindparam("bar") + bindparam("foo")
         eq_(expr.type, types.NULLTYPE)

-    def test_distinct(self):
+    def test_distinct(self, connection):
         s = select([distinct(test_table.c.avalue)])
-        eq_(testing.db.execute(s).scalar(), 25)
+        eq_(connection.execute(s).scalar(), 25)

         s = select([test_table.c.avalue.distinct()])
-        eq_(testing.db.execute(s).scalar(), 25)
+        eq_(connection.execute(s).scalar(), 25)

         assert distinct(test_table.c.data).type == test_table.c.data.type
         assert test_table.c.data.distinct().type == test_table.c.data.type
@@ -2828,7 +2858,8 @@ class NumericRawSQLTest(fixtures.TestBase):
     def _fixture(self, metadata, type_, data):
         t = Table("t", metadata, Column("val", type_))
         metadata.create_all()
-        t.insert().execute(val=data)
+        with testing.db.connect() as conn:
+            conn.execute(t.insert(), val=data)

     @testing.fails_on("sqlite", "Doesn't provide Decimal results natively")
     @testing.provide_metadata
@@ -2898,7 +2929,8 @@ class IntervalTest(fixtures.TestBase, AssertsExecutionResults):

     @engines.close_first
     def teardown(self):
-        interval_table.delete().execute()
+        with testing.db.connect() as conn:
+            conn.execute(interval_table.delete())

     @classmethod
     def teardown_class(cls):
diff --git a/test/sql/test_utils.py b/test/sql/test_utils.py
index 1ccd1e123..a63e55c4e 100644
--- a/test/sql/test_utils.py
+++ b/test/sql/test_utils.py
@@ -30,3 +30,21 @@ class MiscTest(fixtures.TestBase):
         subset_select = select([common.c.id, common.c.data]).alias()

         eq_(sql_util.find_tables(subset_select), [common])
+
+    def test_find_tables_aliases(self):
+        metadata = MetaData()
+        common = Table(
+            "common",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("data", Integer),
+            Column("extra", String(45)),
+        )
+
+        calias = common.alias()
+        subset_select = select([common.c.id, calias.c.data]).subquery()
+
+        eq_(
+            set(sql_util.find_tables(subset_select, include_aliases=True)),
+            {common, calias, subset_select},
+        )
