summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJason R. Coombs <jaraco@jaraco.com>2016-04-26 11:52:22 -0400
committerJason R. Coombs <jaraco@jaraco.com>2016-04-26 11:52:22 -0400
commitb35f7cdf9f0af923af0385c9878a23d3e6494d24 (patch)
tree84a5a44163fabd4b4afd68d183831ad1e20b80b5
parent661440a9248792656455022b6fba51343e58cd50 (diff)
downloadpython-setuptools-git-b35f7cdf9f0af923af0385c9878a23d3e6494d24.tar.gz
Remove all but ez_setup.py from bootstrap branch.
-rw-r--r--.gitignore15
-rw-r--r--.hgignore15
-rw-r--r--.hgtags260
-rw-r--r--.travis.yml37
-rw-r--r--CHANGES.rst2533
-rw-r--r--MANIFEST.in12
-rwxr-xr-xREADME.rst236
-rw-r--r--bootstrap.py57
-rw-r--r--conftest.py1
-rw-r--r--docs/Makefile75
-rw-r--r--docs/_templates/indexsidebar.html8
-rw-r--r--docs/_theme/nature/static/nature.css_t237
-rw-r--r--docs/_theme/nature/static/pygments.css54
-rw-r--r--docs/_theme/nature/theme.conf4
-rw-r--r--docs/conf.py263
-rw-r--r--docs/developer-guide.txt125
-rw-r--r--docs/development.txt35
-rw-r--r--docs/easy_install.txt1625
-rw-r--r--docs/formats.txt682
-rw-r--r--docs/history.txt8
-rw-r--r--docs/index.txt25
-rw-r--r--docs/pkg_resources.txt1952
-rw-r--r--docs/python3.txt94
-rw-r--r--docs/releases.txt56
-rw-r--r--docs/roadmap.txt6
-rw-r--r--docs/setuptools.txt2663
-rwxr-xr-xeasy_install.py5
-rwxr-xr-xlauncher.c335
-rw-r--r--msvc-build-launcher.cmd55
-rw-r--r--pavement.py28
-rw-r--r--pkg_resources/__init__.py2956
-rw-r--r--pkg_resources/_vendor/__init__.py0
-rw-r--r--pkg_resources/_vendor/packaging/__about__.py21
-rw-r--r--pkg_resources/_vendor/packaging/__init__.py14
-rw-r--r--pkg_resources/_vendor/packaging/_compat.py30
-rw-r--r--pkg_resources/_vendor/packaging/_structures.py68
-rw-r--r--pkg_resources/_vendor/packaging/markers.py287
-rw-r--r--pkg_resources/_vendor/packaging/requirements.py127
-rw-r--r--pkg_resources/_vendor/packaging/specifiers.py774
-rw-r--r--pkg_resources/_vendor/packaging/utils.py14
-rw-r--r--pkg_resources/_vendor/packaging/version.py393
-rw-r--r--pkg_resources/_vendor/pyparsing.py3805
-rw-r--r--pkg_resources/_vendor/six.py868
-rw-r--r--pkg_resources/_vendor/vendored.txt3
-rw-r--r--pkg_resources/api_tests.txt401
-rw-r--r--pkg_resources/extern/__init__.py71
-rw-r--r--pkg_resources/tests/__init__.py0
-rw-r--r--pkg_resources/tests/test_markers.py10
-rw-r--r--pkg_resources/tests/test_pkg_resources.py169
-rw-r--r--pkg_resources/tests/test_resources.py834
-rwxr-xr-xpytest.ini3
-rwxr-xr-xsetup.cfg32
-rwxr-xr-xsetup.py165
-rw-r--r--setuptools/__init__.py169
-rwxr-xr-xsetuptools/archive_util.py170
-rw-r--r--setuptools/cli-32.exebin65536 -> 0 bytes
-rw-r--r--setuptools/cli-64.exebin74752 -> 0 bytes
-rw-r--r--setuptools/cli-arm-32.exebin69120 -> 0 bytes
-rw-r--r--setuptools/cli.exebin65536 -> 0 bytes
-rw-r--r--setuptools/command/__init__.py18
-rwxr-xr-xsetuptools/command/alias.py80
-rw-r--r--setuptools/command/bdist_egg.py471
-rwxr-xr-xsetuptools/command/bdist_rpm.py43
-rwxr-xr-xsetuptools/command/bdist_wininst.py21
-rw-r--r--setuptools/command/build_ext.py296
-rw-r--r--setuptools/command/build_py.py222
-rwxr-xr-xsetuptools/command/develop.py196
-rwxr-xr-xsetuptools/command/easy_install.py2263
-rwxr-xr-xsetuptools/command/egg_info.py489
-rw-r--r--setuptools/command/install.py125
-rwxr-xr-xsetuptools/command/install_egg_info.py118
-rw-r--r--setuptools/command/install_lib.py120
-rwxr-xr-xsetuptools/command/install_scripts.py60
-rw-r--r--setuptools/command/launcher manifest.xml15
-rwxr-xr-xsetuptools/command/register.py10
-rwxr-xr-xsetuptools/command/rotate.py66
-rwxr-xr-xsetuptools/command/saveopts.py22
-rwxr-xr-xsetuptools/command/sdist.py196
-rwxr-xr-xsetuptools/command/setopt.py150
-rw-r--r--setuptools/command/test.py196
-rw-r--r--setuptools/command/upload.py38
-rw-r--r--setuptools/command/upload_docs.py191
-rw-r--r--setuptools/depends.py217
-rw-r--r--setuptools/dist.py872
-rw-r--r--setuptools/extension.py57
-rw-r--r--setuptools/extern/__init__.py5
-rw-r--r--setuptools/gui-32.exebin65536 -> 0 bytes
-rw-r--r--setuptools/gui-64.exebin75264 -> 0 bytes
-rw-r--r--setuptools/gui-arm-32.exebin69120 -> 0 bytes
-rw-r--r--setuptools/gui.exebin65536 -> 0 bytes
-rw-r--r--setuptools/launch.py35
-rw-r--r--setuptools/lib2to3_ex.py58
-rw-r--r--setuptools/msvc9_support.py63
-rwxr-xr-xsetuptools/package_index.py1069
-rw-r--r--setuptools/py26compat.py22
-rw-r--r--setuptools/py27compat.py15
-rw-r--r--setuptools/py31compat.py52
-rwxr-xr-xsetuptools/sandbox.py496
-rw-r--r--setuptools/script (dev).tmpl5
-rw-r--r--setuptools/script.tmpl3
-rw-r--r--setuptools/site-patch.py76
-rw-r--r--setuptools/ssl_support.py243
-rw-r--r--setuptools/tests/__init__.py328
-rw-r--r--setuptools/tests/contexts.py98
-rw-r--r--setuptools/tests/environment.py60
-rw-r--r--setuptools/tests/files.py32
-rw-r--r--setuptools/tests/fixtures.py27
-rw-r--r--setuptools/tests/indexes/test_links_priority/external.html3
-rw-r--r--setuptools/tests/indexes/test_links_priority/simple/foobar/index.html4
-rw-r--r--setuptools/tests/py26compat.py14
-rw-r--r--setuptools/tests/script-with-bom.py3
-rw-r--r--setuptools/tests/server.py68
-rw-r--r--setuptools/tests/test_bdist_egg.py43
-rw-r--r--setuptools/tests/test_build_ext.py18
-rw-r--r--setuptools/tests/test_develop.py115
-rw-r--r--setuptools/tests/test_dist_info.py73
-rw-r--r--setuptools/tests/test_easy_install.py599
-rw-r--r--setuptools/tests/test_egg_info.py249
-rw-r--r--setuptools/tests/test_find_packages.py170
-rw-r--r--setuptools/tests/test_integration.py99
-rw-r--r--setuptools/tests/test_msvc9compiler.py179
-rw-r--r--setuptools/tests/test_packageindex.py225
-rw-r--r--setuptools/tests/test_sandbox.py141
-rw-r--r--setuptools/tests/test_sdist.py423
-rw-r--r--setuptools/tests/test_setuptools.py48
-rw-r--r--setuptools/tests/test_test.py92
-rw-r--r--setuptools/tests/test_unicode_utils.py10
-rw-r--r--setuptools/tests/test_upload_docs.py59
-rw-r--r--setuptools/tests/test_windows_wrappers.py183
-rw-r--r--setuptools/tests/textwrap.py8
-rw-r--r--setuptools/unicode_utils.py43
-rw-r--r--setuptools/utils.py11
-rw-r--r--setuptools/version.py6
-rw-r--r--setuptools/windows_support.py29
-rw-r--r--tests/manual_test.py92
-rw-r--r--tox.ini5
136 files changed, 0 insertions, 35136 deletions
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 4d77520f..00000000
--- a/.gitignore
+++ /dev/null
@@ -1,15 +0,0 @@
-# syntax: glob
-bin
-build
-dist
-include
-lib
-distribute.egg-info
-setuptools.egg-info
-.coverage
-.tox
-*.egg
-*.py[cod]
-*.swp
-*~
-.hg*
diff --git a/.hgignore b/.hgignore
deleted file mode 100644
index ebc53b33..00000000
--- a/.hgignore
+++ /dev/null
@@ -1,15 +0,0 @@
-syntax: glob
-bin
-build
-dist
-include
-lib
-distribute.egg-info
-setuptools.egg-info
-.coverage
-.tox
-*.egg
-*.py[cod]
-*.swp
-*~
-.git*
diff --git a/.hgtags b/.hgtags
deleted file mode 100644
index 61bd9da6..00000000
--- a/.hgtags
+++ /dev/null
@@ -1,260 +0,0 @@
-7e9441311eb21dd1fbc32cfbad58168e46c5450e 0.6
-26f429772565f69d1f6d21adf57c3d8c40197129 0.6.1
-6f46749a7454be6e044a54cd73c51318b74bdee8 0.6.2
-34b80fb58862d18f8f957f98a883ed4a72d06f8e 0.6.3
-fb04abddb50d82a9005c9082c94d5eb983be1d79 0.6.4
-8ae0bd250b4a0d58cbaf16b4354ad60f73f24a01 0.6.5
-88847883dfed39829d3a5ed292ad540723ad31cc 0.6.6
-fcbef325349ada38f6c674eb92db82664cf6437c 0.6.7
-3af7f2b8270b9bb34fb65f08ee567bfe8e2a6a5a 0.6.8
-669725d03fd1e345ea47590e9b14cb19742b96a2 0.6.9
-eff3ca9c2d8d39e24c221816c52a37f964535336 0.6.10
-88710e34b91c98c9348749722cce3acd574d177d 0.6.11
-5ce754773a43ac21f7bd13872f45c75e27b593f8 0.6.12
-de36566d35e51bee7cfc86ffa694795e52f4147c 0.6.13
-e5f3f0ffe9e1a243d49a06f26c79dd160f521483 0.6.14
-dc03a300ec7a89ad773047172d43e52b34e7cd1e 0.6.15
-e620fb4ee8ba17debadb614fb583c6dfac229dea 0.6.16
-21df276275b5a47c6a994927d69ad3d90cf62b5d 0.6.17
-e9264ca4ba8c24239c36a8426a0394f7c7d5dd83 0.6.18
-aed31b1fa47ed1f39e55c75b76bbbdb80775b7f1 0.6.19
-c6e6273587816c3e486ef7739e53c864a0145251 0.6.20
-7afdf4c84a713fe151e6163ab25d45e8727ce653 0.6.21
-105066342777cd1319a95d7ae0271a2ea1ac33fe 0.6.23
-7b5ef4e6c80e82541dffb5a9a130d81550d5a835 0.6.24
-9c014a80f32e532371826ed1dc3236975f37f371 0.6.25
-ff8c4d6c8e5d2093750a58a3d43b76556570007c 0.6.26
-2a5c42ed097a195e398b97261c40cd66c8da8913 0.6.27
-4ed34b38851f90278cfe2bff75784f7e32883725 0.6.28
-acecfa2cfb6fca207dd2f4e025c695def3bb6b40 0.6.29
-e950f50addff150859f5990b9df2a33c691b6354 0.6.30
-06dae3faee2de50ff17b90719df410b2ebc5b71e 0.6.31
-1f4f79258ed5b418f680a55d3006f41aa6a56d2b 0.6.32
-89f57bf1406a5e745470af35446902c21ac9b6f6 0.6.33
-3c8f9fc13862124cf20ef2ff2140254fb272bb94 0.6.34
-7c3f8b9eb7cfa17481c835d5caaa918d337c7a83 0.6.35
-192094c0d1e2e5d2cb5c718f84a36c9de04b314b 0.6.36
-66d4e3b8899166e4c04189ee1831c649b7ff38bf 0.6.37
-398d58aa8bba33778c30ce72055a27d4b425809c 0.6.38
-f457fc2a3ebe609d8ca7a869eb65b7506ecf49ef 0.6.39
-9b2e2aa06e058c63e06c5e42a7f279ddae2dfb7d 0.7b1
-9089a40343981baa593b9bb5953f9088e9507099 0.6.40
-ad107e9b4beea24516ac4e1e854696e586fe279d 0.6.41
-f30167716b659f96c5e0b7ea3d5be2bcff8c0eac 0.6.42
-8951daac6c1bc7b24c7fb054fd369f2c5b88cdb3 0.7b2
-35086ee286732b0f63d2be18d9f26f2734586e2d 0.6.43
-63e4eb2d61204f77f9b557201a0efa187b05a611 0.7b3
-73aa98aee6bbc4a9d19a334a8ac928dece7799c6 0.6.44
-53b4ac9a748aa28893aaca42c41e5e99568667bb 0.7b4
-ddca71ae5ceb9b14512dc60ea83802c10e224cf0 0.6.45
-7f2c08e9ca22023d1499c512fccc1513813b7dc4 0.7
-024dd30ed702135f5328975042566e48cc479d7d 0.7.1
-d04c05f035e3a5636006fc34f4be7e6c77035d17 0.7.2
-d212e48e0cef689acba57ed017289c027660b23c 0.7.3
-74c6c12268059986f9cc0b535399594f1d131201 0.8b1
-85640475dda0621f20e11db0995fa07f51744a98 0.7.4
-b57e5ba934767dd498669b17551678081b3047b5 0.6.46
-dd5bbc116c53d3732d22f983e7ca6d8cfabd3b08 0.7.5
-512744f3f306aea0fdde4cfd600af8b2d6e773e7 0.8b2
-8af9839a76407eebf3610fcd3e7973f1625abaa2 0.8b3
-ee2c967017024197b38e39ced852808265387a4b 0.6.47
-48d3d26cbea68e21c96e51f01092e8fdead5cd60 0.7.6
-5b3c7981a02b4a86af1b10ae16492899b515d485 0.8b4
-cae9127e0534fc46d7ddbc11f68dc88fd9311459 0.6.48
-1506fa538fff01e70424530a32a44e070720cf3c 0.7.7
-5679393794978a1d3e1e087472b8a0fdf3d8423c 0.8b5
-26f59ec0f0f69714d28a891aaad048e3b9fcd6f7 0.8b6
-f657df1f1ed46596d236376649c99a470662b4ba 0.6.49
-236de1de68b14230036147c7c9e7c09b215b53ee 0.7.8
-979d598822bc64b05fb177a2ba221e75ee5b44d3 0.8b7
-e3d70539e79f39a97f69674ab038661961a1eb43 0.8
-3078b1e566399bf0c5590f3528df03d0c23a0777 0.9
-9e5a8f734662dd36e6fd6e4ba9031d0e2d294632 0.9.1
-37444bb32e172aaacbc0aeafdf5a778ee471723d 0.9.2
-3e9d2e89de3aa499382d6be2ec8b64d2a29f7f13 0.9.3
-1aef141fc968113e4c521d1edf6ea863c4ff7e00 0.9.4
-88e3d6788facbb2dd6467a23c4f35529a5ce20a1 0.9.5
-acc6c5d61d0f82040c237ac7ea010c0fc9e67d66 0.9.6
-19965a03c1d5231c894e0fabfaf45af1fd99f484 0.9.7
-e0a6e225ad6b28471cd42cfede6e8a334bb548fb 0.9.8
-7b91ff93a30ef78634b7bb34f4a6229a5de281ee 1.0b1
-aba16323ec9382da7bc77c633990ccb3bd58d050 1.0b2
-8a98492f0d852402c93ddbbf3f07081909a9105f 1.0b3
-c385fdf1f976fb1d2a6accc9292d8eca419180fa 1.0
-d943b67fe80dbd61326014e4acedfc488adfa1c9 1.1
-2e42e86546100c9f6845b04eb31b75c5add05f78 1.1.1
-462fe5ccd8befeb2a235e8295d6d73eb3a49cc78 1.1.2
-ddf3561d6a54087745f4bf6ea2048b86195d6fe2 1.1.3
-f94c7e4fa03077e069c1c3cef93ead735559e706 1.1.4
-d9bb58331007ee3f69d31983a180f56b15c731c3 1.1.5
-5e426bdeb46b87e299422adc419f4163b6c78d13 1.1.6
-cc9b19cd0ec64e44308a852e9b9fdc6026ea2e46 1.1.7
-4c7dc4ae2440ae3e9ba26b4a12ffca3407e7030d 1.2b1
-77921bbe3931caf40474dc36e55d3d541981c749 1.2
-19873119647deae8a68e9ed683317b9ee170a8d8 1.3
-a197b626075a8c2e393a08c42a20bd2624a41092 1.3.1
-076b472a9e3f840021e9d5509878337e6e5fcd89 1.3.2
-0d1bdb99a535a2c7ed4edd37141fb0b54348b713 1.4b1
-a13f8c18ce742bc83c794b9eea57980cb94ae18a 1.4
-9a5f26d7df8ef779cb5f40cc0389343fb4c61365 1.4.1
-274cb3beba4f22d5f461b0578b6d56e171d94f2e 1.4.2
-0bb1df93c2eaa50e95ccfce18208b0cca20ebae3 2.0
-bbdba51e1bc1779728ed351529252f73543ace65 2.0.1
-5a62ac60ba31d249db1cfcff31d85ca26421be6d 2.0.2
-c49c651997ebec3b40b71139e8a6a6a15c62c848 2.1
-b5be6c2b828cb92d27f52fccc725ce86a37e9ce0 2.1.1
-ab1c2a26e06f2a2006e8e867e4d41ccf1d6cf9b2 2.2b1
-caab085e829f29679d0e47430b2761af6b20fc76 2.1.2
-39f7ef5ef22183f3eba9e05a46068e1d9fd877b0 2.2
-faba785e9b9e05ba890d0851ef1f3287c32fcac2 3.0b1
-8e8c50925f18eafb7e66fe020aa91a85b9a4b122 3.0
-cd9e857476ac70515f7436f846b593f696ac672d 3.0.1
-bad1f30ee0dfa7a2af4f428d06f62efa39ca48db 3.0.2
-47224d55ddc6bb08c1d17a219f124d0d9c524491 3.1
-07c459bea1c58ff52e0576fc29c1865d18a83b09 3.2
-b306e681a945406833fb297ae10241e2241fc22b 3.3
-78c8cfbe3e1017d1653c48f7306b2c4b4911bf1a 4.0b1
-5cb90066d98700e6d37a01d95c4a2090e730ae02 3.4
-e39de2d3eb774b70c023a1151758213cc9ed2178 3.4.1
-369f6f90f69683702cc0b72827ccf949977808b0 3.4.2
-06a56e063c327b0606f9e9690764279d424646b2 3.4.3
-0917d575d26091a184796624743825914994bf95 3.4.4
-98f29d521c3a57bae0090d2bc5597d93db95b108 3.5
-254d8c625f4620993ce2d2b21212ba01cf307fe6 3.5.1
-572201d08eadc59210f6f0f28f9dc79f906672d3 3.5.2
-e94e768594a1405efde0b79cc60549dd8a4cda9a 3.6
-292dfca15d33e72a862d044183a6ad7c06862a19 3.7b1
-49bd27eebf212c067392796bb2d0fa6d8e583586 3.7
-2fa97c06cc013a9c82f4c1219711e72238d5b6e6 3.8
-9b422fc0b8b97cdb62f02d754283f747adef7f83 3.7.1
-40744de29b848f0e88139ba91d645c08a56855e9 3.8.1
-84d936fd18a93d16c46e68ee2e39f5733f3cd863 5.0
-871bd7b4326f48860ebe0baccdaea8fe4f8f8583 5.0.1
-95996b713722376679c3168b15ab12ea8360dd5f 5.0.2
-3a948b6d01e3449b478fcdc532c44eb3cea5ee10 5.1
-f493e6c4ffd88951871110858c141385305e0077 5.2
-1f9505cfd7524ce0c83ab31d139f47b39c56ccbe 5.3
-baae103e80c307008b156e426a07eb9f486eb4f0 5.4
-ba3b08c7bffd6123e1a7d58994f15e8051a67cb7 5.4.1
-7adcf1397f6eccb9e73eda294343de2943f7c8fb 5.4.2
-68910a89f97a508a64f9f235dc64ad43d4477ea0 5.5
-949a66af4f03521e1404deda940aa951418a13d2 5.5.1
-a1fc0220bfa3581158688789f6dfdc00672eb99b 5.6
-37ed55fd310d0cd32009dc5676121e86b404a23d 5.7
-67550a8ed9f4ef49ee5a31f433adbf5a0eaeccf9 5.8
-755cbfd3743ffb186cdf7e20be8e61dbdaa22503 6.0
-bc6655b4acf205dd9f25c702955645656077398a 6.0.1
-1ae2a75724bbba56373784f185a7f235ed0f24a4 6.0.2b1
-01271e84e5125fcc4f0f368a6e21116a5722953c 6.0.2
-7ea80190d494a766c6356fce85c844703964b6cc 6.1
-df26609c2f614f5fc9110342e4003ee8bd95cf84 7.0
-850a5c155c48b6ecfbb83b961586ea359b561522 8.0b1
-7ea0e7498e4ddbf63b6929ee83c75a9207996b08 8.0
-1af3a5f24f7dd4e51d117f701918052b7de65c99 8.1b1
-d62bf4e407b3b9b5bedcc1396a9ba46f35571902 8.0.1
-1c03d512e39d5cfd711ae3ed7e316769f427e43b 8.0.2
-6c3467488123ce70b1dd009145a02f51fb78cdcc 8.0.3
-2c467afffe9fe1e14618b576fac6b4f7c412a61e 8.0.4
-3f87370b6863e5a4e831b394ef1a58e0e97a4336 8.1
-995f6d9651312cd481ca1e5ddb271cbdd0474c57 8.2
-efbe39dae0aba9a7db399f6442758ae94e315c93 8.2.1
-cd14b2a72e51c7d13873ab6c2041f901b1a7a1cd 8.3
-0eee586a153f068142c1a0df4bc2635ed2c1a1cc 9.0b1
-921e60a0f9067311571fde9ccf2f35223159d9f6 8.4
-0d7b9b63d06ab7f68bc8edd56cb2034e6395d7fc 9.0
-fa069bf2411a150c9379d31a04d1c3836e2d3027 9.0.1
-3ed27d68d3f41bb5daa2afecfa9180d5958fe9d3 9.1
-0c4d18a747a6d39bff8e194a58af949a960d674a 10.0
-4c41e2cdd70beb0da556d71f46a67734c14f2bc2 10.0.1
-26b00011ec65b8f7b4f3d51078ec0a694701a45c 10.1
-651d41db58849d4fc50e466f4dc458d448480c4e 10.2
-1f5de53c079d577ead9d80265c9e006503b16457 10.2.1
-b4b92805bc0e9802da0b597d00df4fa42b30bc40 11.0
-6cd2b18f4be2a9c188fa505b34505b32f4a4554b 11.1
-feb5971e7827483bbdeb67613126bb79ed09e6d9 11.2
-a1a6a1ac9113b90009052ca7263174a488434099 11.3
-1116e568f534ad8f4f41328a0f5fa183eb739c90 11.3.1
-55666947c9eb7e3ba78081ad6ae004807c84aede 12.0
-747018b2e35a40cb4b1c444f150f013d02197c64 12.0.1
-a177ea34bf81662b904fe3af46f3c8719a947ef1 12.0.2
-bf8c5bcacd49bf0f9648013a40ebfc8f7c727f7b 12.0.3
-73dcfc90e3eecec6baddea19302c6b342e68e2fa 12.0.4
-01fbfc9194a2bc502edd682eebbf4d2f1bc79eee 12.0.5
-7bca8938434839dbb546b8bfccd9aab7a86d851e 12.1
-5ff5c804a8fa580cff499ba0025ff2e6a5474fd0 12.2
-8d50aac3b20793954121edb300b477cc75f3ec96 12.3
-297931cb8cac7d44d970adb927efd6cb36ac3526 12.4
-df34cc18624279faffdbc729c0a11e6ab0f46572 13.0
-ae1a5c5cf78f4f9f98c054f1c8cec6168d1d19b4 13.0.1
-e22a1d613bddf311e125eecd9c1e1cad02ab5063 13.0.2
-a3a105f795f8362f26e84e9acbc237ee2d6bcca4 14.0
-9751a1671a124e30ae344d1510b9c1dbb14f2775 14.1
-07fcc3226782b979cedaaf456c7f1c5b2fdafd2c 14.1.1
-d714fb731de779a1337d2d78cd413931f1f06193 14.2
-e3c635a7d463c7713c647d1aa560f83fd8e27ef0 14.3
-608948cef7e0ab8951691b149f5b6f0184a5635e 14.3.1
-617699fd3e44e54b6f95b80bfcf78164df37f266 15.0b1
-d2c4d84867154243993876d6248aafec1fd12679 15.0
-10fde952613b7a3f650fb1f6b6ed58cbd232fa3c 15.1
-df5dc9c7aa7521f552824dee1ed1315cfe180844 15.2
-e0825f0c7d5963c498266fe3c175220c695ae83b 16.0
-8e56240961015347fed477f00ca6a0783e81d3a2 17.0
-a37bcaaeab367f2364ed8c070659d52a4c0ae38e 17.1
-4a0d01d690ff184904293e7a3244ac24ec060a73 17.1.1
-fac98a49bd984ef5accf7177674d693277bfbaef 18.0b1
-0a49ee524b0a1d67d2a11c8c22f082b57acd7ae1 18.0
-e364795c1b09c70b6abb53770e09763b52bf807d 18.0.1
-c0395f556c35d8311fdfe2bda6846b91149819cd 18.1
-1a981f2e5031f55267dc2a28fa1b42274a1b64b2 18.2
-b59320212c8371d0be9e5e6c5f7eec392124c009 18.3
-7a705b610abb1177ca169311c4ee261f3e4f0957 18.3.1
-1e120f04bcaa2421c4df0eb6678c3019ba4a82f6 18.3.2
-6203335278be7543d31790d9fba55739469a4c6c 18.4
-31dc6d2ac0f5ab766652602fe6ca716fff7180e7 18.5
-dfe190b09908f6b953209d13573063809de451b8 18.6
-804f87045a901f1dc121cf9149143d654228dc13 18.6.1
-67d07805606aead09349d5b91d7d26c68ddad2fc 18.7
-3041e1fc409be90e885968b90faba405420fc161 18.7.1
-c811801ffa1de758cf01fbf6a86e4c04ff0c0935 18.8
-fbf06fa35f93a43f044b1645a7e4ff470edb462c 18.8.1
-cc41477ecf92f221c113736fac2830bf8079d40c 19.0
-834782ce49154e9744e499e00eb392c347f9e034 19.1
-0a2a3d89416e1642cf6f41d22dbc07b3d3c15a4d 19.1.1
-5d24cf9d1ced76c406ab3c4a94c25d1fe79b94bc 19.2
-66fa131a0d77a1b0e6f89ccb76b254cfb07d3da3 19.3b1
-32bba9bf8cce8350b560a7591c9ef5884a194211 19.3
-f47f3671508b015e9bb735603d3a0a6ec6a77b01 19.4
-0bda3291ac725750b899b4ba3e4b6765e7645daa 19.4.1
-0a68cbab72580a6f8d3bf9c45206669eefcd256b 19.5
-34121bf49b1a7ac77da7f7c75105c8a920218dd7 19.6b1
-3c2332e4ec72717bf17321473e5c3ad6e5778903 19.6
-35d9179d04390aada66eceae9ceb7b9274f67646 19.6.1
-d2782cbb2f15ca6831ab9426fbf8d4d6ca60db8a 19.6.2
-c6e619ce910d1650cc2433f94e5594964085f973 19.7
-2a60daeff0cdb039b20b2058aaad7dae7bcd2c1c 20.0
-06c9d3ffae80d7f5786c0a454d040d253d47fc03 20.1
-919a40f1843131249f98104c73f3aee3fc835e67 20.1.1
-74c4ffbe1f399345eb4f6a64785cfff54f7e6e7e 20.2
-1aacb05fbdfe06cee904e7a138a4aa6df7b88a63 20.2.1
-48aa5271ef1cd5379cf91a1c958e490692b978e7 20.2.2
-9c55a3a1268a33b4a57b96b2b9fa2cd0701780ee 20.3
-3e87e975a95c780eec497ef9e5a742f7adfb77ec 20.3.1
-06692c64fb9b5843331a918ab7093f151412ec8e 20.4
-f8174392e9e9c6a21ea5df0f22cb4ca885c799ca 20.5
-114f3dbc8a73dacbce2ebe08bb70ca76ab18390e v20.6.0
-a3d4006688fe5e754d0e709a52a00b8191819979 v20.6.1
-2831509712601a78fddf46e51d6f41ae0f92bd0e v20.6.2
-8b46dc41cb234c435b950a879214a6dee54c9dd2 v20.6.3
-7258be20fe93bbf936dc1a81ce71c04c5880663e v20.6.4
-7e0ab283db4e6f780777f7f06af475f044631fa1 v20.6.5
-57d63b38e85515d06e06d3cea62e35e6c54b5093 v20.6.6
-57d63b38e85515d06e06d3cea62e35e6c54b5093 v20.6.6
-b04dbdd161d7f68903a53e1dbd1fa5b5fde73f94 v20.6.6
-0804d30b6ead64e0e324aefd67439b84df2d1c01 v20.6.7
-a00910db03ec15865e4c8506820d4ad1df3e26f3 v20.6.8
-0262ab29fc2417b502a55f49b7fd43528fbd3df4 v20.7.0
-7f56b6f40de39456c78507a14c288709712881cb v20.8.0
-8cf9340669ae26e2b31f68b9c3f885ab7bdd65ce v20.8.1
-8bf8aaa139bb6a36fcd243214d6730a214ae08f5 v20.9.0
-c72faa468919fd2f226c97e94d4e64a6506860e5 v20.10.0
-3b5fdd077c7d83d02c4979ad69cc0bf199b47587 v20.10.1
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index feeb039f..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-language: python
-python:
- - 2.6
- - 2.7
- - 3.3
- - 3.4
- - 3.5
- - pypy
- - pypy3
-env:
- - ""
- - LC_ALL=C LC_CTYPE=C
-script:
- # avoid VersionConflict when newer version is required
- - pip install -U pytest
-
- # Output the env, because the travis docs just can't be trusted
- - env
-
- # update egg_info based on setup.py in checkout
- - python bootstrap.py
-
- - python setup.py test --addopts='-rs'
-
-before_deploy:
- - export SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES=1
-deploy:
- provider: pypi
- on:
- tags: true
- all_branches: true
- python: 3.5
- condition: $LC_ALL != "C"
- user: jaraco
- password:
- secure: tfWrsQMH2bHrWjqnP+08IX1WlkbW94Q30f4d7lCyhWS1FIf/jBDx4jrEILNfMxQ1NCwuBRje5sihj1Ow0BFf0vVrkaeff2IdvnNDEGFduMejaEQJL3s3QrLfpiAvUbtqwyWaHfAdGfk48PovDKTx0ZTvXZKYGXZhxGCYSlG2CE6Y6RDvnEl6Tk8e+LqUohkcSOwxrRwUoyxSnUaavdGohXxDT8MJlfWOXgr2u+KsRrriZqp3l6Fdsnk4IGvy6pXpy42L1HYQyyVu9XyJilR2JTbC6eCp5f8p26093m1Qas49+t6vYb0VLqQe12dO+Jm3v4uztSS5pPQzS7PFyjEYd2Rdb6ijsdbsy1074S4q7G9Sz+T3RsPUwYEJ07lzez8cxP64dtj5j94RL8m35A1Fb1OE8hHN+4c1yLG1gudfXbem+fUhi2eqhJrzQo5vsvDv1xS5x5GIS5ZHgKHCsWcW1Tv+dsFkrhaup3uU6VkOuc9UN+7VPsGEY7NvquGpTm8O1CnGJRzuJg6nbYRGj8ORwDpI0KmrExx6akV92P72fMC/I5TCgbSQSZn370H3Jj40gz1SM30WAli9M+wFHFd4ddMVY65yxj0NLmrP+m1tvnWdKtNh/RHuoW92d9/UFtiA5IhMf1/3djfsjBq6S9NT1uaLkVkTttqrPYJ7hOql8+g=
- distributions: release
diff --git a/CHANGES.rst b/CHANGES.rst
deleted file mode 100644
index be3d2ca4..00000000
--- a/CHANGES.rst
+++ /dev/null
@@ -1,2533 +0,0 @@
-=======
-CHANGES
-=======
-
-v20.10.0
---------
-
-* #553: egg_info section is now generated in a
- deterministic order, matching the order generated
- by earlier versions of Python. Except on Python 2.6,
- order is preserved when existing settings are present.
-* #556: Update to Packaging 16.7, restoring support
- for deprecated ``python_implmentation`` marker.
-* #555: Upload command now prompts for a password
- when uploading to PyPI (or other repository) if no
- password is present in .pypirc or in the keyring.
-
-v20.9.0
--------
-
-* #548: Update certify version to 2016.2.28
-* #545: Safely handle deletion of non-zip eggs in rotate
- command.
-
-v20.8.1
--------
-
-* Issue #544: Fix issue with extra environment marker
- processing in WorkingSet due to refactor in v20.7.0.
-
-v20.8.0
--------
-
-* Issue #543: Re-release so that latest release doesn't
- cause déjà vu with distribute and setuptools 0.7 in
- older environments.
-
-v20.7.0
--------
-
-* Refactored extra enviroment marker processing
- in WorkingSet.
-* Issue #533: Fixed intermittent test failures.
-* Issue #536: In msvc9_support, trap additional exceptions
- that might occur when importing
- ``distutils.msvc9compiler`` in mingw environments.
-* Issue #537: Provide better context when package
- metadata fails to decode in UTF-8.
-
-v20.6.8
--------
-
-* Issue #523: Restored support for environment markers,
- now honoring 'extra' environment markers.
-
-v20.6.7
--------
-
-* Issue #523: Disabled support for environment markers
- introduced in v20.5.
-
-v20.6.6
--------
-
-* Issue #503: Restore support for PEP 345 environment
- markers by updating to Packaging 16.6.
-
-v20.6.0
--------
-
-* New release process that relies on
- `bumpversion <https://github.com/peritus/bumpversion>`_
- and Travis CI for continuous deployment.
-* Project versioning semantics now follow
- `semver <https://semver.org>`_ precisely.
- The 'v' prefix on version numbers now also allows
- version numbers to be referenced in the changelog,
- e.g. https://pythonhosted.org/setuptools/history.html#v20-6-0.
-
-20.5
-----
-
-* BB Pull Request #185: Add support for environment markers
- in requirements in install_requires, setup_requires,
- tests_require as well as adding a test for the existing
- extra_requires machinery.
-
-20.4
-----
-
-* Issue #422: Moved hosting to
- `Github <https://github.com/pypa/setuptools>`_
- from `Bitbucket <https://bitbucket.org/pypa/setuptools>`_.
- Issues have been migrated, though all issues and comments
- are attributed to bb-migration. So if you have a particular
- issue or issues to which you've been subscribed, you will
- want to "watch" the equivalent issue in Github.
- The Bitbucket project will be retained for the indefinite
- future, but Github now hosts the canonical project repository.
-
-20.3.1
-------
-
-* Issue #519: Remove import hook when reloading the
- ``pkg_resources`` module.
-* BB Pull Request #184: Update documentation in ``pkg_resources``
- around new ``Requirement`` implementation.
-
-20.3
-----
-
-* BB Pull Request #179: ``pkg_resources.Requirement`` objects are
- now a subclass of ``packaging.requirements.Requirement``,
- allowing any environment markers and url (if any) to be
- affiliated with the requirement
-* BB Pull Request #179: Restore use of RequirementParseError
- exception unintentionally dropped in 20.2.
-
-20.2.2
-------
-
-* Issue #502: Correct regression in parsing of multiple
- version specifiers separated by commas and spaces.
-
-20.2.1
-------
-
-* Issue #499: Restore compatiblity for legacy versions
- by bumping to packaging 16.4.
-
-20.2
-----
-
-* Changelog now includes release dates and links to PEPs.
-* BB Pull Request #173: Replace dual PEP 345 _markerlib implementation
- and PEP 426 implementation of environment marker support from
- packaging 16.1 and PEP 508. Fixes Issue #122.
- See also BB Pull Request #175, BB Pull Request #168, and
- BB Pull Request #164. Additionally:
-
- - ``Requirement.parse`` no longer retains the order of extras.
- - ``parse_requirements`` now requires that all versions be
- PEP-440 compliant, as revealed in #499. Packages released
- with invalid local versions should be re-released using
- the proper local version syntax, e.g. ``mypkg-1.0+myorg.1``.
-
-20.1.1
-------
-
-* Update ``upload_docs`` command to also honor keyring
- for password resolution.
-
-20.1
-----
-
-* Added support for using passwords from keyring in the upload
- command. See `the upload docs
- <http://pythonhosted.org/setuptools/setuptools.html#upload-upload-source-and-or-egg-distributions-to-pypi>`_
- for details.
-
-20.0
-----
-
-* Issue #118: Once again omit the package metadata (egg-info)
- from the list of outputs in ``--record``. This version of setuptools
- can no longer be used to upgrade pip earlier than 6.0.
-
-19.7
-----
-
-* `Off-project PR <https://github.com/jaraco/setuptools/pull/32>`_:
- For FreeBSD, also honor root certificates from ca_root_nss.
-
-19.6.2
-------
-
-* Issue #491: Correct regression incurred in 19.4 where
- a double-namespace package installed using pip would
- cause a TypeError.
-
-19.6.1
-------
-
-* Restore compatibility for PyPy 3 compatibility lost in
- 19.4.1 addressing Issue #487.
-* ``setuptools.launch`` shim now loads scripts in a new
- namespace, avoiding getting relative imports from
- the setuptools package on Python 2.
-
-19.6
-----
-
-* Added a new entry script ``setuptools.launch``,
- implementing the shim found in
- ``pip.util.setuptools_build``. Use this command to launch
- distutils-only packages under setuptools in the same way that
- pip does, causing the setuptools monkeypatching of distutils
- to be invoked prior to invoking a script. Useful for debugging
- or otherwise installing a distutils-only package under
- setuptools when pip isn't available or otherwise does not
- expose the desired functionality. For example::
-
- $ python -m setuptools.launch setup.py develop
-
-* Issue #488: Fix dual manifestation of Extension class in
- extension packages installed as dependencies when Cython
- is present.
-
-19.5
-----
-
-* Issue #486: Correct TypeError when getfilesystemencoding
- returns None.
-* Issue #139: Clarified the license as MIT.
-* BB Pull Request #169: Removed special handling of command
- spec in scripts for Jython.
-
-19.4.1
-------
-
-* Issue #487: Use direct invocation of ``importlib.machinery``
- in ``pkg_resources`` to avoid missing detection on relevant
- platforms.
-
-19.4
-----
-
-* Issue #341: Correct error in path handling of package data
- files in ``build_py`` command when package is empty.
-* Distribute #323, Issue #141, Issue #207, and
- BB Pull Request #167: Another implementation of
- ``pkg_resources.WorkingSet`` and ``pkg_resources.Distribution``
- that supports replacing an extant package with a new one,
- allowing for setup_requires dependencies to supersede installed
- packages for the session.
-
-19.3
-----
-
-* Issue #229: Implement new technique for readily incorporating
- dependencies conditionally from vendored copies or primary
- locations. Adds a new dependency on six.
-
-19.2
-----
-
-* BB Pull Request #163: Add get_command_list method to Distribution.
-* BB Pull Request #162: Add missing whitespace to multiline string
- literals.
-
-19.1.1
-------
-
-* Issue #476: Cast version to string (using default encoding)
- to avoid creating Unicode types on Python 2 clients.
-* Issue #477: In Powershell downloader, use explicit rendering
- of strings, rather than rely on ``repr``, which can be
- incorrect (especially on Python 2).
-
-19.1
-----
-
-* Issue #215: The bootstrap script ``ez_setup.py`` now
- automatically detects
- the latest version of setuptools (using PyPI JSON API) rather
- than hard-coding a particular value.
-* Issue #475: Fix incorrect usage in _translate_metadata2.
-
-19.0
-----
-
-* Issue #442: Use RawConfigParser for parsing .pypirc file.
- Interpolated values are no longer honored in .pypirc files.
-
-18.8.1
-------
-
-* Issue #440: Prevent infinite recursion when a SandboxViolation
- or other UnpickleableException occurs in a sandbox context
- with setuptools hidden. Fixes regression introduced in Setuptools
- 12.0.
-
-18.8
-----
-
-* Deprecated ``egg_info.get_pkg_info_revision``.
-* Issue #471: Don't rely on repr for an HTML attribute value in
- package_index.
-* Issue #419: Avoid errors in FileMetadata when the metadata directory
- is broken.
-* Issue #472: Remove deprecated use of 'U' in mode parameter
- when opening files.
-
-18.7.1
-------
-
-* Issue #469: Refactored logic for Issue #419 fix to re-use metadata
- loading from Provider.
-
-18.7
-----
-
-* Update dependency on certify.
-* BB Pull Request #160: Improve detection of gui script in
- ``easy_install._adjust_header``.
-* Made ``test.test_args`` a non-data property; alternate fix
- for the issue reported in BB Pull Request #155.
-* Issue #453: In ``ez_setup`` bootstrap module, unload all
- ``pkg_resources`` modules following download.
-* BB Pull Request #158: Honor PEP-488 when excluding
- files for namespace packages.
-* Issue #419 and BB Pull Request #144: Add experimental support for
- reading the version info from distutils-installed metadata rather
- than using the version in the filename.
-
-18.6.1
-------
-
-* Issue #464: Correct regression in invocation of superclass on old-style
- class on Python 2.
-
-18.6
-----
-
-* Issue #439: When installing entry_point scripts under development,
- omit the version number of the package, allowing any version of the
- package to be used.
-
-18.5
-----
-
-* In preparation for dropping support for Python 3.2, a warning is
- now logged when pkg_resources is imported on Python 3.2 or earlier
- Python 3 versions.
-* `Add support for python_platform_implementation environment marker
- <https://github.com/jaraco/setuptools/pull/28>`_.
-* `Fix dictionary mutation during iteration
- <https://github.com/jaraco/setuptools/pull/29>`_.
-
-18.4
-----
-
-* Issue #446: Test command now always invokes unittest, even
- if no test suite is supplied.
-
-18.3.2
-------
-
-* Correct another regression in setuptools.findall
- where the fix for Python #12885 was lost.
-
-18.3.1
-------
-
-* Issue #425: Correct regression in setuptools.findall.
-
-18.3
-----
-
-* Setuptools now allows disabling of the manipulation of the sys.path
- during the processing of the easy-install.pth file. To do so, set
- the environment variable ``SETUPTOOLS_SYS_PATH_TECHNIQUE`` to
- anything but "rewrite" (consider "raw"). During any install operation
- with manipulation disabled, setuptools packages will be appended to
- sys.path naturally.
-
- Future versions may change the default behavior to disable
- manipulation. If so, the default behavior can be retained by setting
- the variable to "rewrite".
-
-* Issue #257: ``easy_install --version`` now shows more detail
- about the installation location and Python version.
-
-* Refactor setuptools.findall in preparation for re-submission
- back to distutils.
-
-18.2
-----
-
-* Issue #412: More efficient directory search in ``find_packages``.
-
-18.1
-----
-
-* Upgrade to vendored packaging 15.3.
-
-18.0.1
-------
-
-* Issue #401: Fix failure in test suite.
-
-18.0
-----
-
-* Dropped support for builds with Pyrex. Only Cython is supported.
-* Issue #288: Detect Cython later in the build process, after
- ``setup_requires`` dependencies are resolved.
- Projects backed by Cython can now be readily built
- with a ``setup_requires`` dependency. For example::
-
- ext = setuptools.Extension('mylib', ['src/CythonStuff.pyx', 'src/CStuff.c'])
- setuptools.setup(
- ...
- ext_modules=[ext],
- setup_requires=['cython'],
- )
-
- For compatibility with older versions of setuptools, packagers should
- still include ``src/CythonMod.c`` in the source distributions or
- require that Cython be present before building source distributions.
- However, for systems with this build of setuptools, Cython will be
- downloaded on demand.
-* Issue #396: Fixed test failure on OS X.
-* BB Pull Request #136: Remove excessive quoting from shebang headers
- for Jython.
-
-17.1.1
-------
-
-* Backed out unintended changes to pkg_resources, restoring removal of
- deprecated imp module (`ref
- <https://bitbucket.org/pypa/setuptools/commits/f572ec9563d647fa8d4ffc534f2af8070ea07a8b#comment-1881283>`_).
-
-17.1
-----
-
-* Issue #380: Add support for range operators on environment
- marker evaluation.
-
-17.0
-----
-
-* Issue #378: Do not use internal importlib._bootstrap module.
-* Issue #390: Disallow console scripts with path separators in
- the name. Removes unintended functionality and brings behavior
- into parity with pip.
-
-16.0
-----
-
-* BB Pull Request #130: Better error messages for errors in
- parsed requirements.
-* BB Pull Request #133: Removed ``setuptools.tests`` from the
- installed packages.
-* BB Pull Request #129: Address deprecation warning due to usage
- of imp module.
-
-15.2
-----
-
-* Issue #373: Provisionally expose
- ``pkg_resources._initialize_master_working_set``, allowing for
- imperative re-initialization of the master working set.
-
-15.1
-----
-
-* Updated to Packaging 15.1 to address Packaging #28.
-* Fix ``setuptools.sandbox._execfile()`` with Python 3.1.
-
-15.0
-----
-
-* BB Pull Request #126: DistributionNotFound message now lists the package or
- packages that required it. E.g.::
-
- pkg_resources.DistributionNotFound: The 'colorama>=0.3.1' distribution was not found and is required by smlib.log.
-
- Note that zc.buildout once depended on the string rendering of this
- message to determine the package that was not found. This expectation
- has since been changed, but older versions of buildout may experience
- problems. See Buildout #242 for details.
-
-14.3.1
-------
-
-* Issue #307: Removed PEP-440 warning during parsing of versions
- in ``pkg_resources.Distribution``.
-* Issue #364: Replace deprecated usage with recommended usage of
- ``EntryPoint.load``.
-
-14.3
-----
-
-* Issue #254: When creating temporary egg cache on Unix, use mode 755
- for creating the directory to avoid the subsequent warning if
- the directory is group writable.
-
-14.2
-----
-
-* Issue #137: Update ``Distribution.hashcmp`` so that Distributions with
- None for pyversion or platform can be compared against Distributions
- defining those attributes.
-
-14.1.1
-------
-
-* Issue #360: Removed undesirable behavior from test runs, preventing
- write tests and installation to system site packages.
-
-14.1
-----
-
-* BB Pull Request #125: Add ``__ne__`` to Requirement class.
-* Various refactoring of easy_install.
-
-14.0
-----
-
-* Bootstrap script now accepts ``--to-dir`` to customize save directory or
- allow for re-use of existing repository of setuptools versions. See
- BB Pull Request #112 for background.
-* Issue #285: ``easy_install`` no longer will default to installing
- packages to the "user site packages" directory if it is itself installed
- there. Instead, the user must pass ``--user`` in all cases to install
- packages to the user site packages.
- This behavior now matches that of "pip install". To configure
- an environment to always install to the user site packages, consider
- using the "install-dir" and "scripts-dir" parameters to easy_install
- through an appropriate distutils config file.
-
-13.0.2
-------
-
-* Issue #359: Include pytest.ini in the sdist so invocation of py.test on the
- sdist honors the pytest configuration.
-
-13.0.1
-------
-
-Re-release of 13.0. Intermittent connectivity issues caused the release
-process to fail and PyPI uploads no longer accept files for 13.0.
-
-13.0
-----
-
-* Issue #356: Back out BB Pull Request #119 as it requires Setuptools 10 or later
- as the source during an upgrade.
-* Removed build_py class from setup.py. According to 892f439d216e, this
- functionality was added to support upgrades from old Distribute versions,
- 0.6.5 and 0.6.6.
-
-12.4
-----
-
-* BB Pull Request #119: Restore writing of ``setup_requires`` to metadata
- (previously added in 8.4 and removed in 9.0).
-
-12.3
-----
-
-* Documentation is now linked using the rst.linker package.
-* Fix ``setuptools.command.easy_install.extract_wininst_cfg()``
- with Python 2.6 and 2.7.
-* Issue #354. Added documentation on building setuptools
- documentation.
-
-12.2
-----
-
-* Issue #345: Unload all modules under pkg_resources during
- ``ez_setup.use_setuptools()``.
-* Issue #336: Removed deprecation from ``ez_setup.use_setuptools``,
- as it is clearly still used by buildout's bootstrap. ``ez_setup``
- remains deprecated for use by individual packages.
-* Simplified implementation of ``ez_setup.use_setuptools``.
-
-12.1
-----
-
-* BB Pull Request #118: Soften warning for non-normalized versions in
- Distribution.
-
-12.0.5
-------
-
-* Issue #339: Correct Attribute reference in ``cant_write_to_target``.
-* Issue #336: Deprecated ``ez_setup.use_setuptools``.
-
-12.0.4
-------
-
-* Issue #335: Fix script header generation on Windows.
-
-12.0.3
-------
-
-* Fixed incorrect class attribute in ``install_scripts``. Tests would be nice.
-
-12.0.2
-------
-
-* Issue #331: Fixed ``install_scripts`` command on Windows systems corrupting
- the header.
-
-12.0.1
-------
-
-* Restore ``setuptools.command.easy_install.sys_executable`` for pbr
- compatibility. For the future, tools should construct a CommandSpec
- explicitly.
-
-12.0
-----
-
-* Issue #188: Setuptools now support multiple entities in the value for
- ``build.executable``, such that an executable of "/usr/bin/env my-python" may
- be specified. This means that systems with a specified executable whose name
- has spaces in the path must be updated to escape or quote that value.
-* Deprecated ``easy_install.ScriptWriter.get_writer``, replaced by ``.best()``
- with slightly different semantics (no force_windows flag).
-
-11.3.1
-------
-
-* Issue #327: Formalize and restore support for any printable character in an
- entry point name.
-
-11.3
-----
-
-* Expose ``EntryPoint.resolve`` in place of EntryPoint._load, implementing the
- simple, non-requiring load. Deprecated all uses of ``EntryPoint._load``
- except for calling with no parameters, which is just a shortcut for
- ``ep.require(); ep.resolve();``.
-
- Apps currently invoking ``ep.load(require=False)`` should instead do the
- following if wanting to avoid the deprecation warning::
-
- getattr(ep, "resolve", lambda: ep.load(require=False))()
-
-11.2
-----
-
-* Pip #2326: Report deprecation warning at stacklevel 2 for easier diagnosis.
-
-11.1
-----
-
-* Issue #281: Since Setuptools 6.1 (Issue #268), a ValueError would be raised
- in certain cases where VersionConflict was raised with two arguments, which
- occurred in ``pkg_resources.WorkingSet.find``. This release adds support
- for indicating the dependent packages while maintaining support for
- a VersionConflict when no dependent package context is known. New unit tests
- now capture the expected interface.
-
-11.0
-----
-
-* Interop #3: Upgrade to Packaging 15.0; updates to PEP 440 so that >1.7 does
- not exclude 1.7.1 but does exclude 1.7.0 and 1.7.0.post1.
-
-10.2.1
-------
-
-* Issue #323: Fix regression in entry point name parsing.
-
-10.2
-----
-
-* Deprecated use of EntryPoint.load(require=False). Passing a boolean to a
- function to select behavior is an anti-pattern. Instead use
- ``EntryPoint._load()``.
-* Substantial refactoring of all unit tests. Tests are now much leaner and
- re-use a lot of fixtures and contexts for better clarity of purpose.
-
-10.1
-----
-
-* Issue #320: Added a compatibility implementation of
- ``sdist._default_revctrl``
- so that systems relying on that interface do not fail (namely, Ubuntu 12.04
- and similar Debian releases).
-
-10.0.1
-------
-
-* Issue #319: Fixed issue installing pure distutils packages.
-
-10.0
-----
-
-* Issue #313: Removed built-in support for subversion. Projects wishing to
- retain support for subversion will need to use a third party library. The
- extant implementation is being ported to `setuptools_svn
- <https://pypi.python.org/pypi/setuptools_svn>`_.
-* Issue #315: Updated setuptools to hide its own loaded modules during
- installation of another package. This change will enable setuptools to
- upgrade (or downgrade) itself even when its own metadata and implementation
- change.
-
-9.1
----
-
-* Prefer vendored packaging library `as recommended
- <https://github.com/jaraco/setuptools/commit/170657b68f4b92e7e1bf82f5e19a831f5744af67#commitcomment-9109448>`_.
-
-9.0.1
------
-
-* Issue #312: Restored presence of pkg_resources API tests (doctest) to sdist.
-
-9.0
----
-
-* Issue #314: Disabled support for ``setup_requires`` metadata to avoid issue
- where Setuptools was unable to upgrade over earlier versions.
-
-8.4
----
-
-* BB Pull Request #106: Now write ``setup_requires`` metadata.
-
-8.3
----
-
-* Issue #311: Decoupled pkg_resources from setuptools once again.
- ``pkg_resources`` is now a package instead of a module.
-
-8.2.1
------
-
-* Issue #306: Suppress warnings about Version format except in select scenarios
- (such as installation).
-
-8.2
----
-
-* BB Pull Request #85: Search egg-base when adding egg-info to manifest.
-
-8.1
----
-
-* Upgrade ``packaging`` to 14.5, giving preference to "rc" as designator for
- release candidates over "c".
-* PEP-440 warnings are now raised as their own class,
- ``pkg_resources.PEP440Warning``, instead of RuntimeWarning.
-* Disabled warnings on empty versions.
-
-8.0.4
------
-
-* Upgrade ``packaging`` to 14.4, fixing an error where there is a
- different result for if 2.0.5 is contained within >2.0dev and >2.0.dev even
- though normalization rules should have made them equal.
-* Issue #296: Add warning when a version is parsed as legacy. This warning will
- make it easier for developers to recognize deprecated version numbers.
-
-8.0.3
------
-
-* Issue #296: Restored support for ``__hash__`` on parse_version results.
-
-8.0.2
------
-
-* Issue #296: Restored support for ``__getitem__`` and sort operations on
- parse_version result.
-
-8.0.1
------
-
-* Issue #296: Restore support for iteration over parse_version result, but
- deprecated that usage with a warning. Fixes failure with buildout.
-
-8.0
----
-
-* Implement PEP 440 within
- pkg_resources and setuptools. This change
- deprecates some version numbers such that they will no longer be installable
- without using the ``===`` escape hatch. See `the changes to test_resources
- <https://bitbucket.org/pypa/setuptools/commits/dcd552da643c4448056de84c73d56da6d70769d5#chg-setuptools/tests/test_resources.py>`_
- for specific examples of version numbers and specifiers that are no longer
- supported. Setuptools now "vendors" the `packaging
- <https://github.com/pypa/packaging>`_ library.
-
-7.0
----
-
-* Issue #80, Issue #209: Eggs that are downloaded for ``setup_requires``,
- ``test_requires``, etc. are now placed in a ``./.eggs`` directory instead of
- directly in the current directory. This choice of location means the files
- can be readily managed (removed, ignored). Additionally,
- later phases or invocations of setuptools will not detect the package as
- already installed and ignore it for permanent install (See #209).
-
- This change is indicated as backward-incompatible as installations that
- depend on the installation in the current directory will need to account for
- the new location. Systems that ignore ``*.egg`` will probably need to be
- adapted to ignore ``.eggs``. The files will need to be manually moved or
- will be retrieved again. Most use cases will require no attention.
-
-6.1
----
-
-* Issue #268: When resolving package versions, a VersionConflict now reports
- which package previously required the conflicting version.
-
-6.0.2
------
-
-* Issue #262: Fixed regression in pip install due to egg-info directories
- being omitted. Re-opens Issue #118.
-
-6.0.1
------
-
-* Issue #259: Fixed regression with namespace package handling on ``single
- version, externally managed`` installs.
-
-6.0
----
-
-* Issue #100: When building a distribution, Setuptools will no longer match
- default files using platform-dependent case sensitivity, but rather will
- only match the files if their case matches exactly. As a result, on Windows
- and other case-insensitive file systems, files with names such as
- 'readme.txt' or 'README.TXT' will be omitted from the distribution and a
- warning will be issued indicating that 'README.txt' was not found. Other
- filenames affected are:
-
- - README.rst
- - README
- - setup.cfg
- - setup.py (or the script name)
- - test/test*.py
-
- Any users producing distributions with filenames that match those above
- case-insensitively, but not case-sensitively, should rename those files in
- their repository for better portability.
-* BB Pull Request #72: When using ``single_version_externally_managed``, the
- exclusion list now includes Python 3.2 ``__pycache__`` entries.
-* BB Pull Request #76 and BB Pull Request #78: lines in top_level.txt are now
- ordered deterministically.
-* Issue #118: The egg-info directory is now no longer included in the list
- of outputs.
-* Issue #258: Setuptools now patches distutils msvc9compiler to
- recognize the specially-packaged compiler package for easy extension module
- support on Python 2.6, 2.7, and 3.2.
-
-5.8
----
-
-* Issue #237: ``pkg_resources`` now uses explicit detection of Python 2 vs.
- Python 3, supporting environments where builtins have been patched to make
- Python 3 look more like Python 2.
-
-5.7
----
-
-* Issue #240: Based on real-world performance measures against 5.4, zip
- manifests are now cached in all circumstances. The
- ``PKG_RESOURCES_CACHE_ZIP_MANIFESTS`` environment variable is no longer
- relevant. The observed "memory increase" referenced in the 5.4 release
- notes and detailed in Issue #154 was likely not an increase over the status
- quo, but rather only an increase over not storing the zip info at all.
-
-5.6
----
-
-* Issue #242: Use absolute imports in svn_utils to avoid issues if the
- installing package adds an xml module to the path.
-
-5.5.1
------
-
-* Issue #239: Fix typo in 5.5 such that fix did not take.
-
-5.5
----
-
-* Issue #239: Setuptools now includes the setup_requires directive on
- Distribution objects and validates the syntax just like install_requires
- and tests_require directives.
-
-5.4.2
------
-
-* Issue #236: Corrected regression in execfile implementation for Python 2.6.
-
-5.4.1
------
-
-* Python #7776: (ssl_support) Correct usage of host for validation when
- tunneling for HTTPS.
-
-5.4
----
-
-* Issue #154: ``pkg_resources`` will now cache the zip manifests rather than
- re-processing the same file from disk multiple times, but only if the
- environment variable ``PKG_RESOURCES_CACHE_ZIP_MANIFESTS`` is set. Clients
- that package many modules in the same zip file will see some improvement
- in startup time by enabling this feature. This feature is not enabled by
- default because it causes a substantial increase in memory usage.
-
-5.3
----
-
-* Issue #185: Make svn tagging work on the new style SVN metadata.
- Thanks cazabon!
-* Prune revision control directories (e.g .svn) from base path
- as well as sub-directories.
-
-5.2
----
-
-* Added a `Developer Guide
- <https://pythonhosted.org/setuptools/developer-guide.html>`_ to the official
- documentation.
-* Some code refactoring and cleanup was done with no intended behavioral
- changes.
-* During install_egg_info, the generated lines for namespace package .pth
- files are now processed even during a dry run.
-
-5.1
----
-
-* Issue #202: Implemented more robust cache invalidation for the ZipImporter,
- building on the work in Issue #168. Special thanks to Jurko Gospodnetic and
- PJE.
-
-5.0.2
------
-
-* Issue #220: Restored script templates.
-
-5.0.1
------
-
-* Renamed script templates to end with .tmpl now that they no longer need
- to be processed by 2to3. Fixes spurious syntax errors during build/install.
-
-5.0
----
-
-* Issue #218: Re-release of 3.8.1 to signal that it supersedes 4.x.
-* Incidentally, script templates were updated not to include the triple-quote
- escaping.
-
-3.7.1 and 3.8.1 and 4.0.1
--------------------------
-
-* Issue #213: Use legacy StringIO behavior for compatibility under pbr.
-* Issue #218: Setuptools 3.8.1 superseded 4.0.1, and 4.x was removed
- from the available versions to install.
-
-4.0
----
-
-* Issue #210: ``setup.py develop`` now copies scripts in binary mode rather
- than text mode, matching the behavior of the ``install`` command.
-
-3.8
----
-
-* Extend Issue #197 workaround to include all Python 3 versions prior to
- 3.2.2.
-
-3.7
----
-
-* Issue #193: Improved handling of Unicode filenames when building manifests.
-
-3.6
----
-
-* Issue #203: Honor proxy settings for Powershell downloader in the bootstrap
- routine.
-
-3.5.2
------
-
-* Issue #168: More robust handling of replaced zip files and stale caches.
- Fixes ZipImportError complaining about a 'bad local header'.
-
-3.5.1
------
-
-* Issue #199: Restored ``install._install`` for compatibility with earlier
- NumPy versions.
-
-3.5
----
-
-* Issue #195: Follow symbolic links in find_packages (restoring behavior
- broken in 3.4).
-* Issue #197: On Python 3.1, PKG-INFO is now saved in a UTF-8 encoding instead
- of ``sys.getpreferredencoding`` to match the behavior on Python 2.6-3.4.
-* Issue #192: Preferred bootstrap location is now
- https://bootstrap.pypa.io/ez_setup.py (mirrored from former location).
-
-3.4.4
------
-
-* Issue #184: Correct failure where find_package over-matched packages
- when directory traversal isn't short-circuited.
-
-3.4.3
------
-
-* Issue #183: Really fix test command with Python 3.1.
-
-3.4.2
------
-
-* Issue #183: Fix additional regression in test command on Python 3.1.
-
-3.4.1
------
-
-* Issue #180: Fix regression in test command not caught by py.test-run tests.
-
-3.4
----
-
-* Issue #176: Add parameter to the test command to support a custom test
- runner: --test-runner or -r.
-* Issue #177: Now assume the most common invocation of the install command on
- platforms/environments without stack support (issuing a warning). Setuptools
- now installs naturally on IronPython. Behavior on CPython should be
- unchanged.
-
-3.3
----
-
-* Add ``include`` parameter to ``setuptools.find_packages()``.
-
-3.2
----
-
-* BB Pull Request #39: Add support for C++ targets from Cython ``.pyx`` files.
-* Issue #162: Update dependency on certifi to 1.0.1.
-* Issue #164: Update dependency on wincertstore to 0.2.
-
-3.1
----
-
-* Issue #161: Restore Features functionality to allow backward compatibility
- (for Features) until the uses of that functionality are sufficiently removed.
-
-3.0.2
------
-
-* Correct typo in previous bugfix.
-
-3.0.1
------
-
-* Issue #157: Restore support for Python 2.6 in bootstrap script where
- ``zipfile.ZipFile`` does not yet have support for context managers.
-
-3.0
----
-
-* Issue #125: Prevent Subversion support from creating a ~/.subversion
- directory just for checking the presence of a Subversion repository.
-* Issue #12: Namespace packages are now imported lazily. That is, the mere
- declaration of a namespace package in an egg on ``sys.path`` no longer
- causes it to be imported when ``pkg_resources`` is imported. Note that this
- change means that all of a namespace package's ``__init__.py`` files must
- include a ``declare_namespace()`` call in order to ensure that they will be
- handled properly at runtime. In 2.x it was possible to get away without
- including the declaration, but only at the cost of forcing namespace
- packages to be imported early, which 3.0 no longer does.
-* Issue #148: When building (bdist_egg), setuptools no longer adds
- ``__init__.py`` files to namespace packages. Any packages that rely on this
- behavior will need to create ``__init__.py`` files and include the
- ``declare_namespace()``.
-* Issue #7: Setuptools itself is now distributed as a zip archive in addition to
- tar archive. ez_setup.py now uses zip archive. This approach avoids the potential
- security vulnerabilities presented by use of tar archives in ez_setup.py.
- It also leverages the security features added to ZipFile.extract in Python 2.7.4.
-* Issue #65: Removed deprecated Features functionality.
-* BB Pull Request #28: Remove backport of ``_bytecode_filenames`` which is
- available in Python 2.6 and later, but also has better compatibility with
- Python 3 environments.
-* Issue #156: Fix spelling of __PYVENV_LAUNCHER__ variable.
-
-2.2
----
-
-* Issue #141: Restored fix for allowing setup_requires dependencies to
- override installed dependencies during setup.
-* Issue #128: Fixed issue where only the first dependency link was honored
- in a distribution where multiple dependency links were supplied.
-
-2.1.2
------
-
-* Issue #144: Read long_description using codecs module to avoid errors
- installing on systems where LANG=C.
-
-2.1.1
------
-
-* Issue #139: Fix regression in re_finder for CVS repos (and maybe Git repos
- as well).
-
-2.1
----
-
-* Issue #129: Suppress inspection of ``*.whl`` files when searching for files
- in a zip-imported file.
-* Issue #131: Fix RuntimeError when constructing an egg fetcher.
-
-2.0.2
------
-
-* Fix NameError during installation with Python implementations (e.g. Jython)
- not containing parser module.
-* Fix NameError in ``sdist:re_finder``.
-
-2.0.1
------
-
-* Issue #124: Fixed error in list detection in upload_docs.
-
-2.0
----
-
-* Issue #121: Exempt lib2to3 pickled grammars from DirectorySandbox.
-* Issue #41: Dropped support for Python 2.4 and Python 2.5. Clients requiring
- setuptools for those versions of Python should use setuptools 1.x.
-* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients
- expecting this boolean variable should use ``site.ENABLE_USER_SITE``
- instead.
-* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class
- should use ``pkgutil.ImpImporter`` instead.
-
-1.4.2
------
-
-* Issue #116: Correct TypeError when reading a local package index on Python
- 3.
-
-1.4.1
------
-
-* Issue #114: Use ``sys.getfilesystemencoding`` for decoding config in
- ``bdist_wininst`` distributions.
-
-* Issue #105 and Issue #113: Establish a more robust technique for
- determining the terminal encoding::
-
- 1. Try ``getpreferredencoding``
- 2. If that returns US_ASCII or None, try the encoding from
- ``getdefaultlocale``. If that encoding was a "fallback" because Python
- could not figure it out from the environment or OS, encoding remains
- unresolved.
- 3. If the encoding is resolved, then make sure Python actually implements
- the encoding.
- 4. In the event of an error or unknown codec, revert to fallbacks
- (UTF-8 on Darwin, ASCII on everything else).
- 5. If the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was
- a bug on older Python releases.
-
- On a side note, it would seem that the encoding only matters for when SVN
- does not yet support ``--xml`` and when getting repository and svn version
- numbers. The ``--xml`` technique should yield UTF-8 according to some
- messages on the SVN mailing lists. So if the version numbers are always
- 7-bit ASCII clean, it may be best to only support the file parsing methods
- for legacy SVN releases and support for SVN without the subprocess command
- would simply go away as support for the older SVNs does.
-
-1.4
----
-
-* Issue #27: ``easy_install`` will now use credentials from .pypirc if
- present for connecting to the package index.
-* BB Pull Request #21: Omit unwanted newlines in ``package_index._encode_auth``
- when the username/password pair length indicates wrapping.
-
-1.3.2
------
-
-* Issue #99: Fix filename encoding issues in SVN support.
-
-1.3.1
------
-
-* Remove exuberant warning in SVN support when SVN is not used.
-
-1.3
----
-
-* Address security vulnerability in SSL match_hostname check as reported in
- Python #17997.
-* Prefer `backports.ssl_match_hostname
- <https://pypi.python.org/pypi/backports.ssl_match_hostname>`_ for backport
- implementation if present.
-* Correct NameError in ``ssl_support`` module (``socket.error``).
-
-1.2
----
-
-* Issue #26: Add support for SVN 1.7. Special thanks to Philip Thiem for the
- contribution.
-* Issue #93: Wheels are now distributed with every release. Note that as
- reported in Issue #108, as of Pip 1.4, scripts aren't installed properly
- from wheels. Therefore, if using Pip to install setuptools from a wheel,
- the ``easy_install`` command will not be available.
-* Setuptools "natural" launcher support, introduced in 1.0, is now officially
- supported.
-
-1.1.7
------
-
-* Fixed behavior of NameError handling in 'script template (dev).py' (script
- launcher for 'develop' installs).
-* ``ez_setup.py`` now ensures partial downloads are cleaned up following
- a failed download.
-* Distribute #363 and Issue #55: Skip an sdist test that fails on locales
- other than UTF-8.
-
-1.1.6
------
-
-* Distribute #349: ``sandbox.execfile`` now opens the target file in binary
- mode, thus honoring a BOM in the file when compiled.
-
-1.1.5
------
-
-* Issue #69: Second attempt at fix (logic was reversed).
-
-1.1.4
------
-
-* Issue #77: Fix error in upload command (Python 2.4).
-
-1.1.3
------
-
-* Fix NameError in previous patch.
-
-1.1.2
------
-
-* Issue #69: Correct issue where 404 errors are returned for URLs with
- fragments in them (such as #egg=).
-
-1.1.1
------
-
-* Issue #75: Add ``--insecure`` option to ez_setup.py to accommodate
- environments where a trusted SSL connection cannot be validated.
-* Issue #76: Fix AttributeError in upload command with Python 2.4.
-
-1.1
----
-
-* Issue #71 (Distribute #333): EasyInstall now puts less emphasis on the
- condition when a host is blocked via ``--allow-hosts``.
-* Issue #72: Restored Python 2.4 compatibility in ``ez_setup.py``.
-
-1.0
----
-
-* Issue #60: On Windows, Setuptools supports deferring to another launcher,
- such as Vinay Sajip's `pylauncher <https://bitbucket.org/pypa/pylauncher>`_
- (included with Python 3.3) to launch console and GUI scripts and not install
- its own launcher executables. This experimental functionality is currently
- only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to
- "natural". In the future, this behavior may become default, but only after
- it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER``
- also accepts "executable" to force the default behavior of creating launcher
- executables.
-* Issue #63: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or
- wget for retrieving the Setuptools tarball for improved security of the
- install. The script will still fall back to a simple ``urlopen`` on
- platforms that do not have these tools.
-* Issue #65: Deprecated the ``Features`` functionality.
-* Issue #52: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied)
- connection.
-
-Backward-Incompatible Changes
-=============================
-
-This release includes a couple of backward-incompatible changes, but most if
-not all users will find 1.0 a drop-in replacement for 0.9.
-
-* Issue #50: Normalized API of environment marker support. Specifically,
- removed line number and filename from SyntaxErrors when returned from
- `pkg_resources.invalid_marker`. Any clients depending on the specific
- string representation of exceptions returned by that function may need to
- be updated to account for this change.
-* Issue #50: SyntaxErrors generated by `pkg_resources.invalid_marker` are
- normalized for cross-implementation consistency.
-* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting``
- options to easy_install. These options have been deprecated since 0.6a11.
-
-0.9.8
------
-
-* Issue #53: Fix NameErrors in `_vcs_split_rev_from_url`.
-
-0.9.7
------
-
-* Issue #49: Correct AttributeError on PyPy where a hashlib.HASH object does
- not have a `.name` attribute.
-* Issue #34: Documentation now refers to bootstrap script in code repository
- referenced by bookmark.
-* Add underscore-separated keys to environment markers (markerlib).
-
-0.9.6
------
-
-* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a `.name`
- attribute.
-
-0.9.5
------
-
-* Python #17980: Fix security vulnerability in SSL certificate validation.
-
-0.9.4
------
-
-* Issue #43: Fix issue (introduced in 0.9.1) with version resolution when
- upgrading over other releases of Setuptools.
-
-0.9.3
------
-
-* Issue #42: Fix new ``AttributeError`` introduced in last fix.
-
-0.9.2
------
-
-* Issue #42: Fix regression where blank checksums would trigger an
- ``AttributeError``.
-
-0.9.1
------
-
-* Distribute #386: Allow other positional and keyword arguments to os.open.
-* Corrected dependency on certifi mis-referenced in 0.9.
-
-0.9
----
-
-* `package_index` now validates hashes other than MD5 in download links.
-
-0.8
----
-
-* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3
- conversion.
-
-0.7.8
------
-
-* Distribute #375: Yet another fix for yet another regression.
-
-0.7.7
------
-
-* Distribute #375: Repair AttributeError created in last release (redo).
-* Issue #30: Added test for get_cache_path.
-
-0.7.6
------
-
-* Distribute #375: Repair AttributeError created in last release.
-
-0.7.5
------
-
-* Issue #21: Restore Python 2.4 compatibility in ``test_easy_install``.
-* Distribute #375: Merged additional warning from Distribute 0.6.46.
-* Now honor the environment variable
- ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now
- deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``.
-
-0.7.4
------
-
-* Issue #20: Fix comparison of parsed SVN version on Python 3.
-
-0.7.3
------
-
-* Issue #1: Disable installation of Windows-specific files on non-Windows systems.
-* Use new sysconfig module with Python 2.7 or >=3.2.
-
-0.7.2
------
-
-* Issue #14: Use markerlib when the `parser` module is not available.
-* Issue #10: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI.
-
-0.7.1
------
-
-* Fix NameError (Issue #3) again - broken in bad merge.
-
-0.7
----
-
-* Merged Setuptools and Distribute. See docs/merge.txt for details.
-
-Added several features that were slated for setuptools 0.6c12:
-
-* Index URL now defaults to HTTPS.
-* Added experimental environment marker support. Now clients may designate a
- PEP-426 environment marker for "extra" dependencies. Setuptools uses this
- feature in ``setup.py`` for optional SSL and certificate validation support
- on older platforms. Based on Distutils-SIG discussions, the syntax is
- somewhat tentative. There should probably be a PEP with a firmer spec before
- the feature should be considered suitable for use.
-* Added support for SSL certificate validation when installing packages from
- an HTTPS service.
-
-0.7b4
------
-
-* Issue #3: Fixed NameError in SSL support.
-
-0.6.49
-------
-
-* Move warning check in ``get_cache_path`` to follow the directory creation
- to avoid errors when the cache path does not yet exist. Fixes the error
- reported in Distribute #375.
-
-0.6.48
-------
-
-* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in
- 0.6.46 (redo).
-
-0.6.47
-------
-
-* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in
- 0.6.46.
-
-0.6.46
-------
-
-* Distribute #375: Issue a warning if the PYTHON_EGG_CACHE or otherwise
- customized egg cache location specifies a directory that's group- or
- world-writable.
-
-0.6.45
-------
-
-* Distribute #379: ``distribute_setup.py`` now traps VersionConflict as well,
- restoring ability to upgrade from an older setuptools version.
-
-0.6.44
-------
-
-* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to
- satisfy use_setuptools.
-
-0.6.43
-------
-
-* Distribute #378: Restore support for Python 2.4 Syntax (regression in 0.6.42).
-
-0.6.42
-------
-
-* External links finder no longer yields duplicate links.
-* Distribute #337: Moved site.py to setuptools/site-patch.py (graft of very old
- patch from setuptools trunk which inspired PR #31).
-
-0.6.41
-------
-
-* Distribute #27: Use public api for loading resources from zip files rather than
- the private method `_zip_directory_cache`.
-* Added a new function ``easy_install.get_win_launcher`` which may be used by
- third-party libraries such as buildout to get a suitable script launcher.
-
-0.6.40
-------
-
-* Distribute #376: brought back cli.exe and gui.exe that were deleted in the
- previous release.
-
-0.6.39
-------
-
-* Add support for console launchers on ARM platforms.
-* Fix possible issue in GUI launchers where the subsystem was not supplied to
- the linker.
-* Launcher build script now refactored for robustness.
-* Distribute #375: Resources extracted from a zip egg to the file system now also
- check the contents of the file against the zip contents during each
- invocation of get_resource_filename.
-
-0.6.38
-------
-
-* Distribute #371: The launcher manifest file is now installed properly.
-
-0.6.37
-------
-
-* Distribute #143: Launcher scripts, including easy_install itself, are now
- accompanied by a manifest on 32-bit Windows environments to avoid the
- Installer Detection Technology and thus undesirable UAC elevation described
- in `this Microsoft article
- <http://technet.microsoft.com/en-us/library/cc709628%28WS.10%29.aspx>`_.
-
-0.6.36
-------
-
-* BB Pull Request #35: In Buildout #64, it was reported that
- under Python 3, installation of distutils scripts could attempt to copy
- the ``__pycache__`` directory as a file, causing an error, apparently only
- under Windows. Easy_install now skips all directories when processing
- metadata scripts.
-
-0.6.35
-------
-
-
-Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in
-how it parses version numbers.
-
-* Distribute #278: Restored compatibility with distribute 0.6.22 and setuptools
- 0.6. Updated the documentation to match more closely with the version
- parsing as intended in setuptools 0.6.
-
-0.6.34
-------
-
-* Distribute #341: 0.6.33 fails to build under Python 2.4.
-
-0.6.33
-------
-
-* Fix 2 errors with Jython 2.5.
-* Fix 1 failure with Jython 2.5 and 2.7.
-* Disable workaround for Jython scripts on Linux systems.
-* Distribute #336: `setup.py` no longer masks failure exit code when tests fail.
-* Fix issue in pkg_resources where try/except around a platform-dependent
- import would trigger hook load failures on Mercurial. See pull request 32
- for details.
-* Distribute #341: Fix a ResourceWarning.
-
-0.6.32
-------
-
-* Fix test suite with Python 2.6.
-* Fix some DeprecationWarnings and ResourceWarnings.
-* Distribute #335: Backed out `setup_requires` superseding installed requirements
-  until regression can be addressed.
-
-0.6.31
-------
-
-* Distribute #303: Make sure the manifest only ever contains UTF-8 in Python 3.
-* Distribute #329: Properly close files created by tests for compatibility with
- Jython.
-* Work around Jython #1980 and Jython #1981.
-* Distribute #334: Provide workaround for packages that reference `sys.__stdout__`
- such as numpy does. This change should address
- `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
- as the system encoding is UTF-8 or the IO encoding is specified in the
- environment, i.e.::
-
- PYTHONIOENCODING=utf8 pip install numpy
-
-* Fix for encoding issue when installing from Windows executable on Python 3.
-* Distribute #323: Allow `setup_requires` requirements to supersede installed
-  requirements. Added some new keyword arguments to existing pkg_resources
-  methods. Also had to update how __path__ is handled for namespace packages
-  to ensure that when a new egg distribution containing a namespace package is
-  placed on sys.path, the entries in __path__ are found in the same order they
-  would have been in had that egg been on the path when pkg_resources was
-  first imported.
-
-0.6.30
-------
-
-* Distribute #328: Clean up temporary directories in distribute_setup.py.
-* Fix fatal bug in distribute_setup.py.
-
-0.6.29
-------
-
-* BB Pull Request #14: Honor file permissions in zip files.
-* Distribute #327: Merged pull request #24 to fix a dependency problem with pip.
-* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
- to produce uploadable documentation.
-* Distribute #326: `upload_docs` provided mangled auth credentials under Python 3.
-* Distribute #320: Fix check for "createable" in distribute_setup.py.
-* Distribute #305: Remove a warning that was triggered during normal operations.
-* Distribute #311: Print metadata in UTF-8 independent of platform.
-* Distribute #303: Read manifest file with UTF-8 encoding under Python 3.
-* Distribute #301: Allow to run tests of namespace packages when using 2to3.
-* Distribute #304: Prevent import loop in site.py under Python 3.3.
-* Distribute #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
-* Distribute #299: The develop command didn't work on Python 3, when using 2to3,
- as the egg link would go to the Python 2 source. Linking to the 2to3'd code
- in build/lib makes it work, although you will have to rebuild the module
- before testing it.
-* Distribute #306: Even if 2to3 is used, we build in-place under Python 2.
-* Distribute #307: Prints the full path when .svn/entries is broken.
-* Distribute #313: Support for sdist subcommands (Python 2.7)
-* Distribute #314: test_local_index() would fail on OS X.
-* Distribute #310: Non-ascii characters in a namespace __init__.py causes errors.
-* Distribute #218: Improved documentation on behavior of `package_data` and
- `include_package_data`. Files indicated by `package_data` are now included
- in the manifest.
-* `distribute_setup.py` now allows a `--download-base` argument for retrieving
- distribute from a specified location.
-
-0.6.28
-------
-
-* Distribute #294: setup.py can now be invoked from any directory.
-* Scripts are now installed honoring the umask.
-* Added support for .dist-info directories.
-* Distribute #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
- Python 3.3.
-
-0.6.27
-------
-
-* Support current snapshots of CPython 3.3.
-* Distribute now recognizes README.rst as a standard, default readme file.
-* Exclude 'encodings' modules when removing modules from sys.modules.
- Workaround for #285.
-* Distribute #231: Don't fiddle with system python when used with buildout
- (bootstrap.py)
-
-0.6.26
-------
-
-* Distribute #183: Symlinked files are now extracted from source distributions.
-* Distribute #227: Easy_install fetch parameters are now passed during the
- installation of a source distribution; now fulfillment of setup_requires
- dependencies will honor the parameters passed to easy_install.
-
-0.6.25
-------
-
-* Distribute #258: Workaround a cache issue
-* Distribute #260: distribute_setup.py now accepts the --user parameter for
- Python 2.6 and later.
-* Distribute #262: package_index.open_with_auth no longer throws LookupError
- on Python 3.
-* Distribute #269: AttributeError when an exception occurs reading Manifest.in
- on late releases of Python.
-* Distribute #272: Prevent TypeError when namespace package names are unicode
- and single-install-externally-managed is used. Also fixes PIP issue
- 449.
-* Distribute #273: Legacy script launchers now install with Python2/3 support.
-
-0.6.24
-------
-
-* Distribute #249: Added options to exclude 2to3 fixers
-
-0.6.23
-------
-
-* Distribute #244: Fixed a test
-* Distribute #243: Fixed a test
-* Distribute #239: Fixed a test
-* Distribute #240: Fixed a test
-* Distribute #241: Fixed a test
-* Distribute #237: Fixed a test
-* Distribute #238: easy_install now uses 64bit executable wrappers on 64bit Python
-* Distribute #208: Fixed parsed_versions, it now honors post-releases as noted in the documentation
-* Distribute #207: Windows cli and gui wrappers pass CTRL-C to child python process
-* Distribute #227: easy_install now passes its arguments to setup.py bdist_egg
-* Distribute #225: Fixed a NameError on Python 2.5, 2.4
-
-0.6.21
-------
-
-* Distribute #225: Fixed a regression on py2.4
-
-0.6.20
-------
-
-* Distribute #135: Include url in warning when processing URLs in package_index.
-* Distribute #212: Fix issue where easy_install fails on Python 3 on Windows installer.
-* Distribute #213: Fix typo in documentation.
-
-0.6.19
-------
-
-* Distribute #206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
-
-0.6.18
-------
-
-* Distribute #210: Fixed a regression introduced by Distribute #204 fix.
-
-0.6.17
-------
-
-* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
- variable to allow to disable installation of easy_install-${version} script.
-* Support Python >=3.1.4 and >=3.2.1.
-* Distribute #204: Don't try to import the parent of a namespace package in
- declare_namespace
-* Distribute #196: Tolerate responses with multiple Content-Length headers
-* Distribute #205: Sandboxing doesn't preserve working_set. Leads to setup_requires
- problems.
-
-0.6.16
-------
-
-* Builds sdist gztar even on Windows (avoiding Distribute #193).
-* Distribute #192: Fixed metadata omitted on Windows when package_dir
- specified with forward-slash.
-* Distribute #195: Cython build support.
-* Distribute #200: Issues with recognizing 64-bit packages on Windows.
-
-0.6.15
-------
-
-* Fixed typo in bdist_egg
-* Several issues under Python 3 have been solved.
-* Distribute #146: Fixed missing DLL files after easy_install of windows exe package.
-
-0.6.14
-------
-
-* Distribute #170: Fixed unittest failure. Thanks to Toshio.
-* Distribute #171: Fixed race condition in unittests causing deadlocks in test suite.
-* Distribute #143: Fixed a lookup issue with easy_install.
- Thanks to David and Zooko.
-* Distribute #174: Fixed the edit mode when its used with setuptools itself
-
-0.6.13
-------
-
-* Distribute #160: 2.7 gives ValueError("Invalid IPv6 URL")
-* Distribute #150: Fixed using ~/.local even in a --no-site-packages virtualenv
-* Distribute #163: scan index links before external links, and don't use the md5 when
- comparing two distributions
-
-0.6.12
-------
-
-* Distribute #149: Fixed various failures on 2.3/2.4
-
-0.6.11
-------
-
-* Found another case of SandboxViolation - fixed
-* Distribute #15 and Distribute #48: Introduced a socket timeout of 15 seconds on url openings
-* Added indexsidebar.html into MANIFEST.in
-* Distribute #108: Fixed TypeError with Python3.1
-* Distribute #121: Fixed --help install command trying to actually install.
-* Distribute #112: Added an os.makedirs so that Tarek's solution will work.
-* Distribute #133: Added --no-find-links to easy_install
-* Added easy_install --user
-* Distribute #100: Fixed develop --user not taking '.' in PYTHONPATH into account
-* Distribute #134: removed spurious UserWarnings. Patch by VanLindberg
-* Distribute #138: cant_write_to_target error when setup_requires is used.
-* Distribute #147: respect the sys.dont_write_bytecode flag
-
-0.6.10
-------
-
-* Reverted change made for the DistributionNotFound exception because
- zc.buildout uses the exception message to get the name of the
- distribution.
-
-0.6.9
------
-
-* Distribute #90: unknown setuptools version can be added in the working set
-* Distribute #87: setup.py doesn't try to convert distribute_setup.py anymore
-  Initial Patch by arfrever.
-* Distribute #89: added a side bar with a download link to the doc.
-* Distribute #86: fixed missing sentence in pkg_resources doc.
-* Added a nicer error message when a DistributionNotFound is raised.
-* Distribute #80: test_develop now works with Python 3.1
-* Distribute #93: upload_docs now works if there is an empty sub-directory.
-* Distribute #70: exec bit on non-exec files
-* Distribute #99: now the standalone easy_install command doesn't use a
-  "setup.cfg" if any exists in the working directory. It will use it
-  only if triggered by ``install_requires`` from a setup.py call
-  (install, develop, etc).
-* Distribute #101: Allowing ``os.devnull`` in Sandbox
-* Distribute #92: Fixed the "no eggs" found error with MacPort
- (platform.mac_ver() fails)
-* Distribute #103: test_get_script_header_jython_workaround not run
-  anymore under py3 with C or POSIX locale. Contributed by Arfrever.
-* Distribute #104: removed the assertion when the installation fails,
-  with a nicer message for the end user.
-* Distribute #100: making sure there's no SandboxViolation when
- the setup script patches setuptools.
-
-0.6.8
------
-
-* Added "check_packages" in dist. (added in Setuptools 0.6c11)
-* Fixed the DONT_PATCH_SETUPTOOLS state.
-
-0.6.7
------
-
-* Distribute #58: Added --user support to the develop command
-* Distribute #11: Generated scripts now wrap their call to the script entry point
- in the standard "if name == 'main'"
-* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
- can drive an installation that doesn't patch a global setuptools.
-* Reviewed unladen-swallow specific change from
- http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
- and determined that it no longer applies. Distribute should work fine with
- Unladen Swallow 2009Q3.
-* Distribute #21: Allow PackageIndex.open_url to gracefully handle all cases of a
- httplib.HTTPException instead of just InvalidURL and BadStatusLine.
-* Removed virtual-python.py from this distribution and updated documentation
- to point to the actively maintained virtualenv instead.
-* Distribute #64: use_setuptools no longer rebuilds the distribute egg every
- time it is run
-* use_setuptools now properly respects the requested version
-* use_setuptools will no longer try to import a distribute egg for the
- wrong Python version
-* Distribute #74: no_fake should be True by default.
-* Distribute #72: avoid a bootstrapping issue with easy_install -U
-
-0.6.6
------
-
-* Unified the bootstrap file so it works on both py2.x and py3k without 2to3
- (patch by Holger Krekel)
-
-0.6.5
------
-
-* Distribute #65: cli.exe and gui.exe are now generated at build time,
- depending on the platform in use.
-
-* Distribute #67: Fixed doc typo (PEP 381/PEP 382).
-
-* Distribute no longer shadows setuptools if we require a 0.7-series
- setuptools. And an error is raised when installing a 0.7 setuptools with
- distribute.
-
-* When run from within buildout, no attempt is made to modify an existing
- setuptools egg, whether in a shared egg directory or a system setuptools.
-
-* Fixed a hole in sandboxing allowing builtin file to write outside of
- the sandbox.
-
-0.6.4
------
-
-* Added the generation of `distribute_setup_3k.py` during the release.
- This closes Distribute #52.
-
-* Added an upload_docs command to easily upload project documentation to
-  PyPI's https://pythonhosted.org. This closes issue Distribute #56.
-
-* Fixed a bootstrap bug on the use_setuptools() API.
-
-0.6.3
------
-
-setuptools
-==========
-
-* Fixed a bunch of calls to file() that caused crashes on Python 3.
-
-bootstrapping
-=============
-
-* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
-
-0.6.2
------
-
-setuptools
-==========
-
-* Added Python 3 support; see docs/python3.txt.
- This closes Old Setuptools #39.
-
-* Added option to run 2to3 automatically when installing on Python 3.
- This closes issue Distribute #31.
-
-* Fixed invalid usage of requirement.parse, that broke develop -d.
- This closes Old Setuptools #44.
-
-* Fixed script launcher for 64-bit Windows.
- This closes Old Setuptools #2.
-
-* KeyError when compiling extensions.
- This closes Old Setuptools #41.
-
-bootstrapping
-=============
-
-* Fixed bootstrap not working on Windows. This closes issue Distribute #49.
-
-* Fixed 2.6 dependencies. This closes issue Distribute #50.
-
-* Make sure setuptools is patched when running through easy_install
- This closes Old Setuptools #40.
-
-0.6.1
------
-
-setuptools
-==========
-
-* package_index.urlopen now catches BadStatusLine and malformed url errors.
- This closes Distribute #16 and Distribute #18.
-
-* zip_ok is now False by default. This closes Old Setuptools #33.
-
-* Fixed invalid URL error catching. Old Setuptools #20.
-
-* Fixed invalid bootstrapping with easy_install installation (Distribute #40).
-  Thanks to Florian Schulze for the help.
-
-* Removed buildout/bootstrap.py. A new repository will create a specific
- bootstrap.py script.
-
-
-bootstrapping
-=============
-
-* The bootstrap process leaves setuptools alone if detected in the system
-  and --root or --prefix is provided, but is not in the same location.
-  This closes Distribute #10.
-
-0.6
----
-
-setuptools
-==========
-
-* Packages required at build time were not fully present at install time.
-  This closes Distribute #12.
-
-* Protected against failures in tarfile extraction. This closes Distribute #10.
-
-* Made Jython api_tests.txt doctest compatible. This closes Distribute #7.
-
-* sandbox.py replaced builtin type file with builtin function open. This
- closes Distribute #6.
-
-* Immediately close all file handles. This closes Distribute #3.
-
-* Added compatibility with Subversion 1.6. This references Distribute #1.
-
-pkg_resources
-=============
-
-* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
- instead. Based on a patch from ronaldoussoren. This closes issue #5.
-
-* Fixed a SandboxViolation for mkdir that could occur in certain cases.
- This closes Distribute #13.
-
-* Allow to find_on_path on systems with tight permissions to fail gracefully.
- This closes Distribute #9.
-
-* Corrected inconsistency between documentation and code of add_entry.
- This closes Distribute #8.
-
-* Immediately close all file handles. This closes Distribute #3.
-
-easy_install
-============
-
-* Immediately close all file handles. This closes Distribute #3.
-
-0.6c9
------
-
- * Fixed a missing files problem when using Windows source distributions on
- non-Windows platforms, due to distutils not handling manifest file line
- endings correctly.
-
- * Updated Pyrex support to work with Pyrex 0.9.6 and higher.
-
- * Minor changes for Jython compatibility, including skipping tests that can't
- work on Jython.
-
- * Fixed not installing eggs in ``install_requires`` if they were also used for
- ``setup_requires`` or ``tests_require``.
-
- * Fixed not fetching eggs in ``install_requires`` when running tests.
-
- * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools
- installations when called from a standalone ``setup.py``.
-
- * Added a warning if a namespace package is declared, but its parent package
- is not also declared as a namespace.
-
- * Support Subversion 1.5
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice
-
- * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's
- ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``.
-
- * Ensure that _full_name is set on all shared libs before extensions are
- checked for shared lib usage. (Fixes a bug in the experimental shared
- library build support.)
-
- * Fix to allow unpacked eggs containing native libraries to fail more
- gracefully under Google App Engine (with an ``ImportError`` loading the
- C-based module, instead of getting a ``NameError``).
-
-0.6c7
------
-
- * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and
- ``egg_info`` command failing on new, uncommitted SVN directories.
-
- * Fix import problems with nested namespace packages installed via
- ``--root`` or ``--single-version-externally-managed``, due to the
- parent package not having the child package as an attribute.
-
-0.6c6
------
-
- * Added ``--egg-path`` option to ``develop`` command, allowing you to force
- ``.egg-link`` files to use relative paths (allowing them to be shared across
- platforms on a networked drive).
-
- * Fix not building binary RPMs correctly.
-
- * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with
- bash-compatible shells.
-
- * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there
- was whitespace inside a quoted argument or at the end of the ``#!`` line
- (a regression introduced in 0.6c4).
-
- * Fix ``test`` command possibly failing if an older version of the project
- being tested was installed on ``sys.path`` ahead of the test source
- directory.
-
- * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in
- their names as packages.
-
-0.6c5
------
-
- * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg``
- packages under Python versions less than 2.5.
-
- * Fix uploaded ``bdist_wininst`` packages being described as suitable for
- "any" version by Python 2.5, even if a ``--target-version`` was specified.
-
-0.6c4
------
-
- * Overhauled Windows script wrapping to support ``bdist_wininst`` better.
- Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or
- ``#!pythonw.exe`` as the executable name (even when built on non-Windows
- platforms!), and the wrappers will look for the executable in the script's
- parent directory (which should find the right version of Python).
-
- * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or
- ``bdist_wininst`` under Python 2.3 and 2.4.
-
- * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is
- prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish
- platforms. (This is mainly so that setuptools itself can have a single-file
- installer on Unix, without doing multiple downloads, dealing with firewalls,
- etc.)
-
- * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files
-
- * Use cross-platform relative paths in ``easy-install.pth`` when doing
- ``develop`` and the source directory is a subdirectory of the installation
- target directory.
-
- * Fix a problem installing eggs with a system packaging tool if the project
- contained an implicit namespace package; for example if the ``setup()``
- listed a namespace package ``foo.bar`` without explicitly listing ``foo``
- as a namespace package.
-
-0.6c3
------
-
- * Fixed breakages caused by Subversion 1.4's new "working copy" format
-
-0.6c2
------
-
- * The ``ez_setup`` module displays the conflicting version of setuptools (and
- its installation location) when a script requests a version that's not
- available.
-
- * Running ``setup.py develop`` on a setuptools-using project will now install
- setuptools if needed, instead of only downloading the egg.
-
-0.6c1
------
-
- * Fixed ``AttributeError`` when trying to download a ``setup_requires``
- dependency when a distribution lacks a ``dependency_links`` setting.
-
- * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so
- as to play better with packaging tools that complain about zero-length
- files.
-
- * Made ``setup.py develop`` respect the ``--no-deps`` option, which it
- previously was ignoring.
-
- * Support ``extra_path`` option to ``setup()`` when ``install`` is run in
- backward-compatibility mode.
-
- * Source distributions now always include a ``setup.cfg`` file that explicitly
- sets ``egg_info`` options such that they produce an identical version number
- to the source distribution's version number. (Previously, the default
- version number could be different due to the use of ``--tag-date``, or if
- the version was overridden on the command line that built the source
- distribution.)
-
-0.6b4
------
-
- * Fix ``register`` not obeying name/version set by ``egg_info`` command, if
- ``egg_info`` wasn't explicitly run first on the same command line.
-
- * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info``
- command, to allow suppressing tags configured in ``setup.cfg``.
-
- * Fixed redundant warnings about missing ``README`` file(s); it should now
- appear only if you are actually a source distribution.
-
-0.6b3
------
-
- * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``.
-
- * Allow ``.py`` files found by the ``include_package_data`` option to be
- automatically included. Remove duplicate data file matches if both
- ``include_package_data`` and ``package_data`` are used to refer to the same
- files.
-
-0.6b1
------
-
- * Strip ``module`` from the end of compiled extension modules when computing
- the name of a ``.py`` loader/wrapper. (Python's import machinery ignores
- this suffix when searching for an extension module.)
-
-0.6a11
-------
-
- * Added ``test_loader`` keyword to support custom test loaders
-
- * Added ``setuptools.file_finders`` entry point group to allow implementing
- revision control plugins.
-
- * Added ``--identity`` option to ``upload`` command.
-
- * Added ``dependency_links`` to allow specifying URLs for ``--find-links``.
-
- * Enhanced test loader to scan packages as well as modules, and call
- ``additional_tests()`` if present to get non-unittest tests.
-
- * Support namespace packages in conjunction with system packagers, by omitting
- the installation of any ``__init__.py`` files for namespace packages, and
- adding a special ``.pth`` file to create a working package in
- ``sys.modules``.
-
- * Made ``--single-version-externally-managed`` automatic when ``--root`` is
- used, so that most system packagers won't require special support for
- setuptools.
-
- * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or
- other configuration files for their option defaults when installing, and
- also made the install use ``--multi-version`` mode so that the project
- directory doesn't need to support .pth files.
-
- * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading
- it. Previously, the file could be left open and the actual error would be
- masked by problems trying to remove the open file on Windows systems.
-
-0.6a10
-------
-
- * Fixed the ``develop`` command ignoring ``--find-links``.
-
-0.6a9
------
-
- * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to
- create source distributions. ``MANIFEST.in`` is still read and processed,
- as are the standard defaults and pruning. But the manifest is built inside
- the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt
- every time the ``egg_info`` command is run.
-
- * Added the ``include_package_data`` keyword to ``setup()``, allowing you to
- automatically include any package data listed in revision control or
- ``MANIFEST.in``
-
- * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to
- trim back files included via the ``package_data`` and
- ``include_package_data`` options.
-
- * Fixed ``--tag-svn-revision`` not working when run from a source
- distribution.
-
- * Added warning for namespace packages with missing ``declare_namespace()``
-
- * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages
- requiring ``nose`` to run unit tests can make this dependency optional
- unless the ``test`` command is run.
-
- * Made all commands that use ``easy_install`` respect its configuration
- options, as this was causing some problems with ``setup.py install``.
-
- * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so
- that you can process a directory tree through a processing filter as if it
- were a zipfile or tarfile.
-
- * Added an internal ``install_egg_info`` command to use as part of old-style
- ``install`` operations, that installs an ``.egg-info`` directory with the
- package.
-
- * Added a ``--single-version-externally-managed`` option to the ``install``
- command so that you can more easily wrap a "flat" egg in a system package.
-
- * Enhanced ``bdist_rpm`` so that it installs single-version eggs that
- don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed,
- since all RPMs are now built in a more backwards-compatible format.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
- format. Running ``bdist_wininst`` on a setuptools-based package wraps the
- egg in an .exe that will safely install it as an egg (i.e., with metadata
- and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
- back into an ``.egg`` file or directory and install it as such.
-
-
-0.6a8
------
-
- * Fixed some problems building extensions when Pyrex was installed, especially
- with Python 2.4 and/or packages using SWIG.
-
- * Made ``develop`` command accept all the same options as ``easy_install``,
- and use the ``easy_install`` command's configuration settings as defaults.
-
- * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision
- number from ``PKG-INFO`` in case it is being run on a source distribution of
- a snapshot taken from a Subversion-based project.
-
- * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being
- installed as data, adding them to ``native_libs.txt`` automatically.
-
- * Fixed some problems with fresh checkouts of projects that don't include
- ``.egg-info/PKG-INFO`` under revision control and put the project's source
- code directly in the project directory. If such a package had any
- requirements that get processed before the ``egg_info`` command can be run,
- the setup scripts would fail with a "Missing 'Version:' header and/or
- PKG-INFO file" error, because the egg runtime interpreted the unbuilt
- metadata in a directory on ``sys.path`` (i.e. the current directory) as
- being a corrupted egg. Setuptools now monkeypatches the distribution
- metadata cache to pretend that the egg has valid version information, until
- it has a chance to make it actually be so (via the ``egg_info`` command).
-
-0.6a5
------
-
- * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests.
-
-0.6a3
------
-
- * Added ``gui_scripts`` entry point group to allow installing GUI scripts
- on Windows and other platforms. (The special handling is only for Windows;
- other platforms are treated the same as for ``console_scripts``.)
-
-0.6a2
------
-
- * Added ``console_scripts`` entry point group to allow installing scripts
- without the need to create separate script files. On Windows, console
- scripts get an ``.exe`` wrapper so you can just type their name. On other
- platforms, the scripts are written without a file extension.
-
-0.6a1
------
-
- * Added support for building "old-style" RPMs that don't install an egg for
- the target package, using a ``--no-egg`` option.
-
- * The ``build_ext`` command now works better when using the ``--inplace``
- option and multiple Python versions. It now makes sure that all extensions
- match the current Python version, even if newer copies were built for a
- different Python version.
-
- * The ``upload`` command no longer attaches an extra ``.zip`` when uploading
- eggs, as PyPI now supports egg uploads without trickery.
-
- * The ``ez_setup`` script/module now displays a warning before downloading
- the setuptools egg, and attempts to check the downloaded egg against an
- internal MD5 checksum table.
-
- * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the
- latest revision number; it was using the revision number of the directory
- containing ``setup.py``, not the highest revision number in the project.
-
- * Added ``eager_resources`` setup argument
-
- * The ``sdist`` command now recognizes Subversion "deleted file" entries and
- does not include them in source distributions.
-
- * ``setuptools`` now embeds itself more thoroughly into the distutils, so that
- other distutils extensions (e.g. py2exe, py2app) will subclass setuptools'
- versions of things, rather than the native distutils ones.
-
- * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``;
- ``setup_requires`` allows you to automatically find and download packages
- that are needed in order to *build* your project (as opposed to running it).
-
- * ``setuptools`` now finds its commands, ``setup()`` argument validators, and
- metadata writers using entry points, so that they can be extended by
- third-party packages. See `Creating distutils Extensions
- <http://pythonhosted.org/setuptools/setuptools.html#creating-distutils-extensions>`_
- for more details.
-
- * The vestigial ``depends`` command has been removed. It was never finished
- or documented, and never would have worked without EasyInstall - which it
- pre-dated and was never compatible with.
-
-0.5a12
-------
-
- * The zip-safety scanner now checks for modules that might be used with
- ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't
- handle ``-m`` on zipped modules.
-
-0.5a11
-------
-
- * Fix breakage of the "develop" command that was caused by the addition of
- ``--always-unzip`` to the ``easy_install`` command.
-
-0.5a9
------
-
- * Include ``svn:externals`` directories in source distributions as well as
- normal subversion-controlled files and directories.
-
- * Added ``exclude=patternlist`` option to ``setuptools.find_packages()``
-
- * Changed --tag-svn-revision to include an "r" in front of the revision number
- for better readability.
-
- * Added ability to build eggs without including source files (except for any
- scripts, of course), using the ``--exclude-source-files`` option to
- ``bdist_egg``.
-
- * ``setup.py install`` now automatically detects when an "unmanaged" package
- or module is going to be on ``sys.path`` ahead of a package being installed,
- thereby preventing the newer version from being imported. If this occurs,
- a warning message is output to ``sys.stderr``, but installation proceeds
- anyway. The warning message informs the user what files or directories
- need deleting, and advises them they can also use EasyInstall (with the
- ``--delete-conflicting`` option) to do it automatically.
-
- * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata
- directory that lists all top-level modules and packages in the distribution.
- This is used by the ``easy_install`` command to find possibly-conflicting
- "unmanaged" packages when installing the distribution.
-
- * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``.
- Added package analysis to determine zip-safety if the ``zip_safe`` flag
- is not given, and advise the author regarding what code might need changing.
-
- * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``.
-
-0.5a8
------
-
- * The "egg_info" command now always sets the distribution metadata to "safe"
- forms of the distribution name and version, so that distribution files will
- be generated with parseable names (i.e., ones that don't include '-' in the
- name or version). Also, this means that if you use the various ``--tag``
- options of "egg_info", any distributions generated will use the tags in the
- version, not just egg distributions.
-
- * Added support for defining command aliases in distutils configuration files,
- under the "[aliases]" section. To prevent recursion and to allow aliases to
- call the command of the same name, a given alias can be expanded only once
- per command-line invocation. You can define new aliases with the "alias"
- command, either for the local, global, or per-user configuration.
-
- * Added "rotate" command to delete old distribution files, given a set of
- patterns to match and the number of files to keep. (Keeps the most
- recently-modified distribution files matching each pattern.)
-
- * Added "saveopts" command that saves all command-line options for the current
- invocation to the local, global, or per-user configuration file. Useful for
- setting defaults without having to hand-edit a configuration file.
-
- * Added a "setopt" command that sets a single option in a specified distutils
- configuration file.
-
-0.5a7
------
-
- * Added "upload" support for egg and source distributions, including a bug
- fix for "upload" and a temporary workaround for lack of .egg support in
- PyPI.
-
-0.5a6
------
-
- * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
- will include all files under revision control (CVS or Subversion) in the
- current directory, and it will regenerate the list every time you create a
- source distribution, not just when you tell it to. This should make the
- default "do what you mean" more often than the distutils' default behavior
- did, while still retaining the old behavior in the presence of MANIFEST.in.
-
- * Fixed the "develop" command always updating .pth files, even if you
- specified ``-n`` or ``--dry-run``.
-
- * Slightly changed the format of the generated version when you use
- ``--tag-build`` on the "egg_info" command, so that you can make tagged
- revisions compare *lower* than the version specified in setup.py (e.g. by
- using ``--tag-build=dev``).
-
-0.5a5
------
-
- * Added ``develop`` command to ``setuptools``-based packages. This command
- installs an ``.egg-link`` pointing to the package's source directory, and
- script wrappers that ``execfile()`` the source versions of the package's
- scripts. This lets you put your development checkout(s) on sys.path without
- having to actually install them. (To uninstall the link, use
- ``setup.py develop --uninstall``.)
-
- * Added ``egg_info`` command to ``setuptools``-based packages. This command
- just creates or updates the "projectname.egg-info" directory, without
- building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop``
- commands.)
-
- * Enhanced the ``test`` command so that it doesn't install the package, but
- instead builds any C extensions in-place, updates the ``.egg-info``
- metadata, adds the source directory to ``sys.path``, and runs the tests
- directly on the source. This avoids an "unmanaged" installation of the
- package to ``site-packages`` or elsewhere.
-
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
- the ``easy_install`` module to ``setuptools.command.easy_install``. Note
- that if you were importing or extending it, you must now change your imports
- accordingly. ``easy_install.py`` is still installed as a script, but not as
- a module.
-
-0.5a4
------
-
- * Setup scripts using setuptools can now list their dependencies directly in
- the setup.py file, without having to manually create a ``depends.txt`` file.
- The ``install_requires`` and ``extras_require`` arguments to ``setup()``
- are used to create a dependencies file automatically. If you are manually
- creating ``depends.txt`` right now, please switch to using these setup
- arguments as soon as practical, because ``depends.txt`` support will be
- removed in the 0.6 release cycle. For documentation on the new arguments,
- see the ``setuptools.dist.Distribution`` class.
-
- * Setup scripts using setuptools now always install using ``easy_install``
- internally, for ease of uninstallation and upgrading.
-
-0.5a1
------
-
- * Added support for "self-installation" bootstrapping. Packages can now
- include ``ez_setup.py`` in their source distribution, and add the following
- to their ``setup.py``, in order to automatically bootstrap installation of
- setuptools as part of their setup process::
-
- from ez_setup import use_setuptools
- use_setuptools()
-
- from setuptools import setup
- # etc...
-
-0.4a2
------
-
- * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools
- installation easier, and to allow distributions using setuptools to avoid
- having to include setuptools in their source distribution.
-
- * All downloads are now managed by the ``PackageIndex`` class (which is now
- subclassable and replaceable), so that embedders can more easily override
- download logic, give download progress reports, etc. The class has also
- been moved to the new ``setuptools.package_index`` module.
-
- * The ``Installer`` class no longer handles downloading, manages a temporary
- directory, or tracks the ``zip_ok`` option. Downloading is now handled
- by ``PackageIndex``, and ``Installer`` has become an ``easy_install``
- command class based on ``setuptools.Command``.
-
- * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup
- script in a directory sandbox, and a new ``setuptools.archive_util`` module
- with an ``unpack_archive()`` API. These were split out of EasyInstall to
- allow reuse by other tools and applications.
-
- * ``setuptools.Command`` now supports reinitializing commands using keyword
- arguments to set/reset options. Also, ``Command`` subclasses can now set
- their ``command_consumes_arguments`` attribute to ``True`` in order to
- receive an ``args`` option containing the rest of the command line.
-
-0.3a2
------
-
- * Added new options to ``bdist_egg`` to allow tagging the egg's version number
- with a subversion revision number, the current date, or an explicit tag
- value. Run ``setup.py bdist_egg --help`` to get more information.
-
- * Misc. bug fixes
-
-0.3a1
------
-
- * Initial release.
-
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 668e13ce..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,12 +0,0 @@
-recursive-include setuptools *.py *.exe *.xml
-recursive-include tests *.py
-recursive-include setuptools/tests *.html
-recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html
-recursive-include setuptools/_vendor *
-recursive-include pkg_resources *.py *.txt
-include *.py
-include *.txt
-include MANIFEST.in
-include launcher.c
-include msvc-build-launcher.cmd
-include pytest.ini
diff --git a/README.rst b/README.rst
deleted file mode 100755
index f94c6fcb..00000000
--- a/README.rst
+++ /dev/null
@@ -1,236 +0,0 @@
-===============================
-Installing and Using Setuptools
-===============================
-
-.. contents:: **Table of Contents**
-
-
-`Change History <https://pythonhosted.org/setuptools/history.html>`_.
-
--------------------------
-Installation Instructions
--------------------------
-
-The recommended way to bootstrap setuptools on any system is to download
-`ez_setup.py`_ and run it using the target Python environment. Different
-operating systems have different recommended techniques to accomplish this
-basic routine, so below are some examples to get you started.
-
-Setuptools requires Python 2.6 or later. To install setuptools
-on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x
-<https://raw.githubusercontent.com/pypa/setuptools/bootstrap-py24/ez_setup.py>`_.
-
-The link provided to ez_setup.py is a bookmark to the bootstrap script for the
-latest known stable release.
-
-.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py
-
-Windows (Powershell 3 or later)
-===============================
-
-For best results, uninstall previous versions FIRST (see `Uninstalling`_).
-
-Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows
-with PowerShell 3 installed, it's possible to install with one simple
-Powershell command. Start up Powershell and paste this command::
-
- > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python -
-
-You must start the Powershell with Administrative privileges or you may choose
-to install a user-local installation::
-
- > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user
-
-If you have Python 3.3 or later, you can use the ``py`` command to install to
-different Python versions. For example, to install to Python 3.3 if you have
-Python 2.7 installed::
-
- > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 -
-
-The recommended way to install setuptools on Windows is to download
-`ez_setup.py`_ and run it. The script will download the appropriate
-distribution file and install it for you.
-
-Once installation is complete, you will find an ``easy_install`` program in
-your Python ``Scripts`` subdirectory. For simple invocation and best results,
-add this directory to your ``PATH`` environment variable, if it is not already
-present. If you did a user-local install, the ``Scripts`` subdirectory is
-``$env:APPDATA\Python\Scripts``.
-
-
-Windows (simplified)
-====================
-
-For Windows without PowerShell 3 or for installation without a command-line,
-download `ez_setup.py`_ using your preferred web browser or other technique
-and "run" that file.
-
-
-Unix (wget)
-===========
-
-Most Linux distributions come with wget.
-
-Download `ez_setup.py`_ and run it using the target Python version. The script
-will download the appropriate version and install it for you::
-
- > wget https://bootstrap.pypa.io/ez_setup.py -O - | python
-
-Note that you may need to invoke the command with superuser privileges to
-install to the system Python::
-
- > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python
-
-Alternatively, Setuptools may be installed to a user-local path::
-
- > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user
-
-Note that on some older systems (noted on Debian 6 and CentOS 5 installations),
-`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net`
-does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using
-`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the
-host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget`
-(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them,
-install Setuptools as follows::
-
- > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py
- > python ez_setup.py --insecure
-
-
-Unix including Mac OS X (curl)
-==============================
-
-If your system has curl installed, follow the ``wget`` instructions but
-replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example::
-
- > curl https://bootstrap.pypa.io/ez_setup.py -o - | python
-
-
-Advanced Installation
-=====================
-
-For more advanced installation options, such as installing to custom
-locations or prefixes, download and extract the source
-tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_
-and run setup.py with any supported distutils and Setuptools options.
-For example::
-
- setuptools-x.x$ python setup.py install --prefix=/opt/setuptools
-
-Use ``--help`` to get a full options list, but we recommend consulting
-the `EasyInstall manual`_ for detailed instructions, especially `the section
-on custom installation locations`_.
-
-.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall
-.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations
-
-
-Downloads
-=========
-
-All setuptools downloads can be found at `the project's home page in the Python
-Package Index`_. Scroll to the very bottom of the page to find the links.
-
-.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools
-
-In addition to the PyPI downloads, the development version of ``setuptools``
-is available from the `Bitbucket repo`_, and in-development versions of the
-`0.6 branch`_ are available as well.
-
-.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev
-.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
-
-Uninstalling
-============
-
-On Windows, if Setuptools was installed using an ``.exe`` or ``.msi``
-installer, simply use the uninstall feature of "Add/Remove Programs" in the
-Control Panel.
-
-Otherwise, to uninstall Setuptools or Distribute, regardless of the Python
-version, delete all ``setuptools*`` and ``distribute*`` files and
-directories from your system's ``site-packages`` directory
-(and any other ``sys.path`` directories) FIRST.
-
-If you are upgrading or otherwise plan to re-install Setuptools or Distribute,
-nothing further needs to be done. If you want to completely remove Setuptools,
-you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts
-and associated executables installed to the Python scripts directory.
-
---------------------------------
-Using Setuptools and EasyInstall
---------------------------------
-
-Here are some of the available manuals, tutorials, and other resources for
-learning about Setuptools, Python Eggs, and EasyInstall:
-
-* `The EasyInstall user's guide and reference manual`_
-* `The setuptools Developer's Guide`_
-* `The pkg_resources API reference`_
-* `The Internal Structure of Python Eggs`_
-
-Questions, comments, and bug reports should be directed to the `distutils-sig
-mailing list`_. If you have written (or know of) any tutorials, documentation,
-plug-ins, or other resources for setuptools users, please let us know about
-them there, so this reference list can be updated. If you have working,
-*tested* patches to correct problems or add features, you may submit them to
-the `setuptools bug tracker`_.
-
-.. _setuptools bug tracker: https://github.com/pypa/setuptools/issues
-.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html
-.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html
-.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html
-.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html
-.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
-
-
--------
-Credits
--------
-
-* The original design for the ``.egg`` format and the ``pkg_resources`` API was
- co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
- version of ``pkg_resources``, and supplied the OS X operating system version
- compatibility algorithm.
-
-* Ian Bicking implemented many early "creature comfort" features of
- easy_install, including support for downloading via Sourceforge and
- Subversion repositories. Ian's comments on the Web-SIG about WSGI
- application deployment also inspired the concept of "entry points" in eggs,
- and he has given talks at PyCon and elsewhere to inform and educate the
- community about eggs and setuptools.
-
-* Jim Fulton contributed time and effort to build automated tests of various
- aspects of ``easy_install``, and supplied the doctests for the command-line
- ``.exe`` wrappers on Windows.
-
-* Phillip J. Eby is the seminal author of setuptools, and
- first proposed the idea of an importable binary distribution format for
- Python application plug-ins.
-
-* Significant parts of the implementation of setuptools were funded by the Open
- Source Applications Foundation, to provide a plug-in infrastructure for the
- Chandler PIM application. In addition, many OSAF staffers (such as Mike
- "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
- use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
-
-* Tarek Ziadé is the principal author of the Distribute fork, which
- re-invigorated the community on the project, encouraged renewed innovation,
- and addressed many defects.
-
-* Since the merge with Distribute, Jason R. Coombs is the
- maintainer of setuptools. The project is maintained in coordination with
- the Python Packaging Authority (PyPA) and the larger Python community.
-
-.. _files:
-
-
----------------
-Code of Conduct
----------------
-
-Everyone interacting in the setuptools project's codebases, issue trackers,
-chat rooms, and mailing lists is expected to follow the
-`PyPA Code of Conduct`_.
-
-.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
diff --git a/bootstrap.py b/bootstrap.py
deleted file mode 100644
index 70f96258..00000000
--- a/bootstrap.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-If setuptools is not already installed in the environment, it's not possible
-to invoke setuptools' own commands. This routine will bootstrap this local
-environment by creating a minimal egg-info directory and then invoking the
-egg-info command to flesh out the egg-info directory.
-"""
-
-import os
-import sys
-import textwrap
-import subprocess
-
-
-minimal_egg_info = textwrap.dedent("""
- [distutils.commands]
- egg_info = setuptools.command.egg_info:egg_info
-
- [distutils.setup_keywords]
- include_package_data = setuptools.dist:assert_bool
- install_requires = setuptools.dist:check_requirements
- extras_require = setuptools.dist:check_extras
- entry_points = setuptools.dist:check_entry_points
-
- [egg_info.writers]
- dependency_links.txt = setuptools.command.egg_info:overwrite_arg
- entry_points.txt = setuptools.command.egg_info:write_entries
- requires.txt = setuptools.command.egg_info:write_requirements
- """)
-
-def ensure_egg_info():
- if os.path.exists('setuptools.egg-info'):
- return
- print("adding minimal entry_points")
- build_egg_info()
-
-
-def build_egg_info():
- """
- Build a minimal egg-info, enough to invoke egg_info
- """
-
- os.mkdir('setuptools.egg-info')
- with open('setuptools.egg-info/entry_points.txt', 'w') as ep:
- ep.write(minimal_egg_info)
-
-
-def run_egg_info():
- cmd = [sys.executable, 'setup.py', 'egg_info']
- print("Regenerating egg_info")
- subprocess.check_call(cmd)
- print("...and again.")
- subprocess.check_call(cmd)
-
-
-if __name__ == '__main__':
- ensure_egg_info()
- run_egg_info()
diff --git a/conftest.py b/conftest.py
deleted file mode 100644
index a513bb9e..00000000
--- a/conftest.py
+++ /dev/null
@@ -1 +0,0 @@
-pytest_plugins = 'setuptools.tests.fixtures'
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 30bf10a9..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,75 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-PAPER =
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " changes to make an overview over all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
-
-clean:
- -rm -rf build/*
-
-html:
- mkdir -p build/html build/doctrees
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
- @echo
- @echo "Build finished. The HTML pages are in build/html."
-
-pickle:
- mkdir -p build/pickle build/doctrees
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-web: pickle
-
-json:
- mkdir -p build/json build/doctrees
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- mkdir -p build/htmlhelp build/doctrees
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in build/htmlhelp."
-
-latex:
- mkdir -p build/latex build/doctrees
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
- @echo
- @echo "Build finished; the LaTeX files are in build/latex."
- @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
- "run these through (pdf)latex."
-
-changes:
- mkdir -p build/changes build/doctrees
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
- @echo
- @echo "The overview file is in build/changes."
-
-linkcheck:
- mkdir -p build/linkcheck build/doctrees
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in build/linkcheck/output.txt."
diff --git a/docs/_templates/indexsidebar.html b/docs/_templates/indexsidebar.html
deleted file mode 100644
index a27c85fe..00000000
--- a/docs/_templates/indexsidebar.html
+++ /dev/null
@@ -1,8 +0,0 @@
-<h3>Download</h3>
-
-<p>Current version: <b>{{ version }}</b></p>
-<p>Get Setuptools from the <a href="https://pypi.python.org/pypi/setuptools"> Python Package Index</a>
-
-<h3>Questions? Suggestions? Contributions?</h3>
-
-<p>Visit the <a href="https://bitbucket.org/pypa/setuptools">Setuptools project page</a> </p>
diff --git a/docs/_theme/nature/static/nature.css_t b/docs/_theme/nature/static/nature.css_t
deleted file mode 100644
index 1a654264..00000000
--- a/docs/_theme/nature/static/nature.css_t
+++ /dev/null
@@ -1,237 +0,0 @@
-/**
- * Sphinx stylesheet -- default theme
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
-
-@import url("basic.css");
-
-/* -- page layout ----------------------------------------------------------- */
-
-body {
- font-family: Arial, sans-serif;
- font-size: 100%;
- background-color: #111111;
- color: #555555;
- margin: 0;
- padding: 0;
-}
-
-div.documentwrapper {
- float: left;
- width: 100%;
-}
-
-div.bodywrapper {
- margin: 0 0 0 300px;
-}
-
-hr{
- border: 1px solid #B1B4B6;
-}
-
-div.document {
- background-color: #fafafa;
-}
-
-div.body {
- background-color: #ffffff;
- color: #3E4349;
- padding: 1em 30px 30px 30px;
- font-size: 0.9em;
-}
-
-div.footer {
- color: #555;
- width: 100%;
- padding: 13px 0;
- text-align: center;
- font-size: 75%;
-}
-
-div.footer a {
- color: #444444;
-}
-
-div.related {
- background-color: #6BA81E;
- line-height: 36px;
- color: #ffffff;
- text-shadow: 0px 1px 0 #444444;
- font-size: 1.1em;
-}
-
-div.related a {
- color: #E2F3CC;
-}
-
-div.related .right {
- font-size: 0.9em;
-}
-
-div.sphinxsidebar {
- font-size: 0.9em;
- line-height: 1.5em;
- width: 300px;
-}
-
-div.sphinxsidebarwrapper{
- padding: 20px 0;
-}
-
-div.sphinxsidebar h3,
-div.sphinxsidebar h4 {
- font-family: Arial, sans-serif;
- color: #222222;
- font-size: 1.2em;
- font-weight: bold;
- margin: 0;
- padding: 5px 10px;
- text-shadow: 1px 1px 0 white
-}
-
-div.sphinxsidebar h3 a {
- color: #444444;
-}
-
-div.sphinxsidebar p {
- color: #888888;
- padding: 5px 20px;
- margin: 0.5em 0px;
-}
-
-div.sphinxsidebar p.topless {
-}
-
-div.sphinxsidebar ul {
- margin: 10px 10px 10px 20px;
- padding: 0;
- color: #000000;
-}
-
-div.sphinxsidebar a {
- color: #444444;
-}
-
-div.sphinxsidebar a:hover {
- color: #E32E00;
-}
-
-div.sphinxsidebar input {
- border: 1px solid #cccccc;
- font-family: sans-serif;
- font-size: 1.1em;
- padding: 0.15em 0.3em;
-}
-
-div.sphinxsidebar input[type=text]{
- margin-left: 20px;
-}
-
-/* -- body styles ----------------------------------------------------------- */
-
-a {
- color: #005B81;
- text-decoration: none;
-}
-
-a:hover {
- color: #E32E00;
-}
-
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
- font-family: Arial, sans-serif;
- font-weight: normal;
- color: #212224;
- margin: 30px 0px 10px 0px;
- padding: 5px 0 5px 0px;
- text-shadow: 0px 1px 0 white;
- border-bottom: 1px solid #C8D5E3;
-}
-
-div.body h1 { margin-top: 0; font-size: 200%; }
-div.body h2 { font-size: 150%; }
-div.body h3 { font-size: 120%; }
-div.body h4 { font-size: 110%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
-
-a.headerlink {
- color: #c60f0f;
- font-size: 0.8em;
- padding: 0 4px 0 4px;
- text-decoration: none;
-}
-
-a.headerlink:hover {
- background-color: #c60f0f;
- color: white;
-}
-
-div.body p, div.body dd, div.body li {
- line-height: 1.8em;
-}
-
-div.admonition p.admonition-title + p {
- display: inline;
-}
-
-div.highlight{
- background-color: white;
-}
-
-div.note {
- background-color: #eeeeee;
- border: 1px solid #cccccc;
-}
-
-div.seealso {
- background-color: #ffffcc;
- border: 1px solid #ffff66;
-}
-
-div.topic {
- background-color: #fafafa;
- border-width: 0;
-}
-
-div.warning {
- background-color: #ffe4e4;
- border: 1px solid #ff6666;
-}
-
-p.admonition-title {
- display: inline;
-}
-
-p.admonition-title:after {
- content: ":";
-}
-
-pre {
- padding: 10px;
- background-color: #fafafa;
- color: #222222;
- line-height: 1.5em;
- font-size: 1.1em;
- margin: 1.5em 0 1.5em 0;
- -webkit-box-shadow: 0px 0px 4px #d8d8d8;
- -moz-box-shadow: 0px 0px 4px #d8d8d8;
- box-shadow: 0px 0px 4px #d8d8d8;
-}
-
-tt {
- color: #222222;
- padding: 1px 2px;
- font-size: 1.2em;
- font-family: monospace;
-}
-
-#table-of-contents ul {
- padding-left: 2em;
-}
-
diff --git a/docs/_theme/nature/static/pygments.css b/docs/_theme/nature/static/pygments.css
deleted file mode 100644
index 652b7612..00000000
--- a/docs/_theme/nature/static/pygments.css
+++ /dev/null
@@ -1,54 +0,0 @@
-.c { color: #999988; font-style: italic } /* Comment */
-.k { font-weight: bold } /* Keyword */
-.o { font-weight: bold } /* Operator */
-.cm { color: #999988; font-style: italic } /* Comment.Multiline */
-.cp { color: #999999; font-weight: bold } /* Comment.preproc */
-.c1 { color: #999988; font-style: italic } /* Comment.Single */
-.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #aa0000 } /* Generic.Error */
-.gh { color: #999999 } /* Generic.Heading */
-.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
-.go { color: #111 } /* Generic.Output */
-.gp { color: #555555 } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #aaaaaa } /* Generic.Subheading */
-.gt { color: #aa0000 } /* Generic.Traceback */
-.kc { font-weight: bold } /* Keyword.Constant */
-.kd { font-weight: bold } /* Keyword.Declaration */
-.kp { font-weight: bold } /* Keyword.Pseudo */
-.kr { font-weight: bold } /* Keyword.Reserved */
-.kt { color: #445588; font-weight: bold } /* Keyword.Type */
-.m { color: #009999 } /* Literal.Number */
-.s { color: #bb8844 } /* Literal.String */
-.na { color: #008080 } /* Name.Attribute */
-.nb { color: #999999 } /* Name.Builtin */
-.nc { color: #445588; font-weight: bold } /* Name.Class */
-.no { color: #ff99ff } /* Name.Constant */
-.ni { color: #800080 } /* Name.Entity */
-.ne { color: #990000; font-weight: bold } /* Name.Exception */
-.nf { color: #990000; font-weight: bold } /* Name.Function */
-.nn { color: #555555 } /* Name.Namespace */
-.nt { color: #000080 } /* Name.Tag */
-.nv { color: purple } /* Name.Variable */
-.ow { font-weight: bold } /* Operator.Word */
-.mf { color: #009999 } /* Literal.Number.Float */
-.mh { color: #009999 } /* Literal.Number.Hex */
-.mi { color: #009999 } /* Literal.Number.Integer */
-.mo { color: #009999 } /* Literal.Number.Oct */
-.sb { color: #bb8844 } /* Literal.String.Backtick */
-.sc { color: #bb8844 } /* Literal.String.Char */
-.sd { color: #bb8844 } /* Literal.String.Doc */
-.s2 { color: #bb8844 } /* Literal.String.Double */
-.se { color: #bb8844 } /* Literal.String.Escape */
-.sh { color: #bb8844 } /* Literal.String.Heredoc */
-.si { color: #bb8844 } /* Literal.String.Interpol */
-.sx { color: #bb8844 } /* Literal.String.Other */
-.sr { color: #808000 } /* Literal.String.Regex */
-.s1 { color: #bb8844 } /* Literal.String.Single */
-.ss { color: #bb8844 } /* Literal.String.Symbol */
-.bp { color: #999999 } /* Name.Builtin.Pseudo */
-.vc { color: #ff99ff } /* Name.Variable.Class */
-.vg { color: #ff99ff } /* Name.Variable.Global */
-.vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */ \ No newline at end of file
diff --git a/docs/_theme/nature/theme.conf b/docs/_theme/nature/theme.conf
deleted file mode 100644
index 1cc40044..00000000
--- a/docs/_theme/nature/theme.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = nature.css
-pygments_style = tango
diff --git a/docs/conf.py b/docs/conf.py
deleted file mode 100644
index f315e2b7..00000000
--- a/docs/conf.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Setuptools documentation build configuration file, created by
-# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-
-# Allow Sphinx to find the setup command that is imported below, as referenced above.
-import sys, os
-sys.path.append(os.path.abspath('..'))
-
-import setup as setup_script
-
-# -- General configuration -----------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['rst.linker']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.txt'
-
-# The encoding of source files.
-#source_encoding = 'utf-8'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = 'Setuptools'
-copyright = '2009-2014, The fellowship of the packaging'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = setup_script.setup_params['version']
-# The full version, including alpha/beta/rc tags.
-release = setup_script.setup_params['version']
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of documents that shouldn't be included in the build.
-#unused_docs = []
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_trees = []
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'nature'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['_theme']
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-html_title = "Setuptools documentation"
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-html_short_title = "Setuptools"
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-html_sidebars = {'index': 'indexsidebar.html'}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-html_use_modindex = False
-
-# If false, no index is generated.
-html_use_index = False
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = ''
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'Setuptoolsdoc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
-
-# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
- ('index', 'Setuptools.tex', 'Setuptools Documentation',
- 'The fellowship of the packaging', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_use_modindex = True
-
-link_files = {
- 'CHANGES.rst': dict(
- using=dict(
- BB='https://bitbucket.org',
- GH='https://github.com',
- ),
- replace=[
- dict(
- pattern=r"(Issue )?#(?P<issue>\d+)",
- url='{GH}/pypa/setuptools/issues/{issue}',
- ),
- dict(
- pattern=r"BB Pull Request ?#(?P<bb_pull_request>\d+)",
- url='{BB}/pypa/setuptools/pull-request/{bb_pull_request}',
- ),
- dict(
- pattern=r"Distribute #(?P<distribute>\d+)",
- url='{BB}/tarek/distribute/issue/{distribute}',
- ),
- dict(
- pattern=r"Buildout #(?P<buildout>\d+)",
- url='{GH}/buildout/buildout/issues/{buildout}',
- ),
- dict(
- pattern=r"Old Setuptools #(?P<old_setuptools>\d+)",
- url='http://bugs.python.org/setuptools/issue{old_setuptools}',
- ),
- dict(
- pattern=r"Jython #(?P<jython>\d+)",
- url='http://bugs.jython.org/issue{jython}',
- ),
- dict(
- pattern=r"Python #(?P<python>\d+)",
- url='http://bugs.python.org/issue{python}',
- ),
- dict(
- pattern=r"Interop #(?P<interop>\d+)",
- url='{GH}/pypa/interoperability-peps/issues/{interop}',
- ),
- dict(
- pattern=r"Pip #(?P<pip>\d+)",
- url='{GH}/pypa/pip/issues/{pip}',
- ),
- dict(
- pattern=r"Packaging #(?P<packaging>\d+)",
- url='{GH}/pypa/packaging/issues/{packaging}',
- ),
- dict(
- pattern=r"[Pp]ackaging (?P<packaging_ver>\d+(\.\d+)+)",
- url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',
- ),
- dict(
- pattern=r"PEP[- ](?P<pep_number>\d+)",
- url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
- ),
- dict(
- pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
- with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
- ),
- ],
- ),
-}
diff --git a/docs/developer-guide.txt b/docs/developer-guide.txt
deleted file mode 100644
index 7cd3c6d2..00000000
--- a/docs/developer-guide.txt
+++ /dev/null
@@ -1,125 +0,0 @@
-================================
-Developer's Guide for Setuptools
-================================
-
-If you want to know more about contributing on Setuptools, this is the place.
-
-
-.. contents:: **Table of Contents**
-
-
--------------------
-Recommended Reading
--------------------
-
-Please read `How to write the perfect pull request
-<http://blog.jaraco.com/2014/04/how-to-write-perfect-pull-request.html>`_
-for some tips on contributing to open source projects. Although the article
-is not authoritative, it was authored by the maintainer of Setuptools, so
-reflects his opinions and will improve the likelihood of acceptance and
-quality of contribution.
-
-------------------
-Project Management
-------------------
-
-Setuptools is maintained primarily in Github at `this home
-<https://github.com/pypa/setuptools>`_. Setuptools is maintained under the
-Python Packaging Authority (PyPA) with several core contributors. All bugs
-for Setuptools are filed and the canonical source is maintained in Github.
-
-User support and discussions are done through the issue tracker (for specific)
-issues, through the distutils-sig mailing list, or on IRC (Freenode) at
-#pypa.
-
-Discussions about development happen on the pypa-dev mailing list or on IRC
-(Freenode) at #pypa-dev.
-
------------------
-Authoring Tickets
------------------
-
-Before authoring any source code, it's often prudent to file a ticket
-describing the motivation behind making changes. First search to see if a
-ticket already exists for your issue. If not, create one. Try to think from
-the perspective of the reader. Explain what behavior you expected, what you
-got instead, and what factors might have contributed to the unexpected
-behavior. In Github, surround a block of code or traceback with the triple
-backtick "\`\`\`" so that it is formatted nicely.
-
-Filing a ticket provides a forum for justification, discussion, and
-clarification. The ticket provides a record of the purpose for the change and
-any hard decisions that were made. It provides a single place for others to
-reference when trying to understand why the software operates the way it does
-or why certain changes were made.
-
-Setuptools makes extensive use of hyperlinks to tickets in the changelog so
-that system integrators and other users can get a quick summary, but then
-jump to the in-depth discussion about any subject referenced.
-
------------
-Source Code
------------
-
-Grab the code at Github::
-
- $ git checkout https://github.com/pypa/setuptools
-
-If you want to contribute changes, we recommend you fork the repository on
-Github, commit the changes to your repository, and then make a pull request
-on Github. If you make some changes, don't forget to:
-
-- add a note in CHANGES.rst
-
-Please commit all changes in the 'master' branch against the latest available
-commit or for bug-fixes, against an earlier commit or release in which the
-bug occurred.
-
-If you find yourself working on more than one issue at a time, Setuptools
-generally prefers Git-style branches, so use Mercurial bookmarks or Git
-branches or multiple forks to maintain separate efforts.
-
-The Continuous Integration tests that validate every release are run
-from this repository.
-
-For posterity, the old `Bitbucket mirror
-<https://bitbucket.org/pypa/setuptools>`_ is available.
-
--------
-Testing
--------
-
-The primary tests are run using py.test. To run the tests::
-
- $ python setup.py test
-
-Or install py.test into your environment and run ``PYTHONPATH=. py.test``
-or ``python -m pytest``.
-
-Under continuous integration, additional tests may be run. See the
-``.travis.yml`` file for full details on the tests run under Travis-CI.
-
--------------------
-Semantic Versioning
--------------------
-
-Setuptools follows ``semver``.
-
-.. explain value of reflecting meaning in versions.
-
-----------------------
-Building Documentation
-----------------------
-
-Setuptools relies on the Sphinx system for building documentation and in
-particular the ``build_sphinx`` distutils command. To build the
-documentation, invoke::
-
- python setup.py build_sphinx
-
-from the root of the repository. Setuptools will download a compatible
-build of Sphinx and any requisite plugins and then build the
-documentation in the build/sphinx directory.
-
-Setuptools does not support invoking the doc builder from the docs/
-directory as some tools expect.
diff --git a/docs/development.txt b/docs/development.txt
deleted file mode 100644
index 455f038a..00000000
--- a/docs/development.txt
+++ /dev/null
@@ -1,35 +0,0 @@
--------------------------
-Development on Setuptools
--------------------------
-
-Setuptools is maintained by the Python community under the Python Packaging
-Authority (PyPA) and led by Jason R. Coombs.
-
-This document describes the process by which Setuptools is developed.
-This document assumes the reader has some passing familiarity with
-*using* setuptools, the ``pkg_resources`` module, and EasyInstall. It
-does not attempt to explain basic concepts like inter-project
-dependencies, nor does it contain detailed lexical syntax for most
-file formats. Neither does it explain concepts like "namespace
-packages" or "resources" in any detail, as all of these subjects are
-covered at length in the setuptools developer's guide and the
-``pkg_resources`` reference manual.
-
-Instead, this is **internal** documentation for how those concepts and
-features are *implemented* in concrete terms. It is intended for people
-who are working on the setuptools code base, who want to be able to
-troubleshoot setuptools problems, want to write code that reads the file
-formats involved, or want to otherwise tinker with setuptools-generated
-files and directories.
-
-Note, however, that these are all internal implementation details and
-are therefore subject to change; stick to the published API if you don't
-want to be responsible for keeping your code from breaking when
-setuptools changes. You have been warned.
-
-.. toctree::
- :maxdepth: 1
-
- developer-guide
- formats
- releases
diff --git a/docs/easy_install.txt b/docs/easy_install.txt
deleted file mode 100644
index 8dd176fd..00000000
--- a/docs/easy_install.txt
+++ /dev/null
@@ -1,1625 +0,0 @@
-============
-Easy Install
-============
-
-Easy Install is a python module (``easy_install``) bundled with ``setuptools``
-that lets you automatically download, build, install, and manage Python
-packages.
-
-Please share your experiences with us! If you encounter difficulty installing
-a package, please contact us via the `distutils mailing list
-<http://mail.python.org/pipermail/distutils-sig/>`_. (Note: please DO NOT send
-private email directly to the author of setuptools; it will be discarded. The
-mailing list is a searchable archive of previously-asked and answered
-questions; you should begin your research there before reporting something as a
-bug -- and then do so via list discussion first.)
-
-(Also, if you'd like to learn about how you can use ``setuptools`` to make your
-own packages work better with EasyInstall, or provide EasyInstall-like features
-without requiring your users to use EasyInstall directly, you'll probably want
-to check out the full `setuptools`_ documentation as well.)
-
-.. contents:: **Table of Contents**
-
-
-Using "Easy Install"
-====================
-
-
-.. _installation instructions:
-
-Installing "Easy Install"
--------------------------
-
-Please see the `setuptools PyPI page <https://pypi.python.org/pypi/setuptools>`_
-for download links and basic installation instructions for each of the
-supported platforms.
-
-You will need at least Python 2.6. An ``easy_install`` script will be
-installed in the normal location for Python scripts on your platform.
-
-Note that the instructions on the setuptools PyPI page assume that you are
-are installing to Python's primary ``site-packages`` directory. If this is
-not the case, you should consult the section below on `Custom Installation
-Locations`_ before installing. (And, on Windows, you should not use the
-``.exe`` installer when installing to an alternate location.)
-
-Note that ``easy_install`` normally works by downloading files from the
-internet. If you are behind an NTLM-based firewall that prevents Python
-programs from accessing the net directly, you may wish to first install and use
-the `APS proxy server <http://ntlmaps.sf.net/>`_, which lets you get past such
-firewalls in the same way that your web browser(s) do.
-
-(Alternately, if you do not wish easy_install to actually download anything, you
-can restrict it from doing so with the ``--allow-hosts`` option; see the
-sections on `restricting downloads with --allow-hosts`_ and `command-line
-options`_ for more details.)
-
-
-Troubleshooting
-~~~~~~~~~~~~~~~
-
-If EasyInstall/setuptools appears to install correctly, and you can run the
-``easy_install`` command but it fails with an ``ImportError``, the most likely
-cause is that you installed to a location other than ``site-packages``,
-without taking any of the steps described in the `Custom Installation
-Locations`_ section below. Please see that section and follow the steps to
-make sure that your custom location will work correctly. Then re-install.
-
-Similarly, if you can run ``easy_install``, and it appears to be installing
-packages, but then you can't import them, the most likely issue is that you
-installed EasyInstall correctly but are using it to install packages to a
-non-standard location that hasn't been properly prepared. Again, see the
-section on `Custom Installation Locations`_ for more details.
-
-
-Windows Notes
-~~~~~~~~~~~~~
-
-Installing setuptools will provide an ``easy_install`` command according to
-the techniques described in `Executables and Launchers`_. If the
-``easy_install`` command is not available after installation, that section
-provides details on how to configure Windows to make the commands available.
-
-
-Downloading and Installing a Package
-------------------------------------
-
-For basic use of ``easy_install``, you need only supply the filename or URL of
-a source distribution or .egg file (`Python Egg`__).
-
-__ http://peak.telecommunity.com/DevCenter/PythonEggs
-
-**Example 1**. Install a package by name, searching PyPI for the latest
-version, and automatically downloading, building, and installing it::
-
- easy_install SQLObject
-
-**Example 2**. Install or upgrade a package by name and version by finding
-links on a given "download page"::
-
- easy_install -f http://pythonpaste.org/package_index.html SQLObject
-
-**Example 3**. Download a source distribution from a specified URL,
-automatically building and installing it::
-
- easy_install http://example.com/path/to/MyPackage-1.2.3.tgz
-
-**Example 4**. Install an already-downloaded .egg file::
-
- easy_install /my_downloads/OtherPackage-3.2.1-py2.3.egg
-
-**Example 5**. Upgrade an already-installed package to the latest version
-listed on PyPI::
-
- easy_install --upgrade PyProtocols
-
-**Example 6**. Install a source distribution that's already downloaded and
-extracted in the current directory (New in 0.5a9)::
-
- easy_install .
-
-**Example 7**. (New in 0.6a1) Find a source distribution or Subversion
-checkout URL for a package, and extract it or check it out to
-``~/projects/sqlobject`` (the name will always be in all-lowercase), where it
-can be examined or edited. (The package will not be installed, but it can
-easily be installed with ``easy_install ~/projects/sqlobject``. See `Editing
-and Viewing Source Packages`_ below for more info.)::
-
- easy_install --editable --build-directory ~/projects SQLObject
-
-**Example 7**. (New in 0.6.11) Install a distribution within your home dir::
-
- easy_install --user SQLAlchemy
-
-Easy Install accepts URLs, filenames, PyPI package names (i.e., ``distutils``
-"distribution" names), and package+version specifiers. In each case, it will
-attempt to locate the latest available version that meets your criteria.
-
-When downloading or processing downloaded files, Easy Install recognizes
-distutils source distribution files with extensions of .tgz, .tar, .tar.gz,
-.tar.bz2, or .zip. And of course it handles already-built .egg
-distributions as well as ``.win32.exe`` installers built using distutils.
-
-By default, packages are installed to the running Python installation's
-``site-packages`` directory, unless you provide the ``-d`` or ``--install-dir``
-option to specify an alternative directory, or specify an alternate location
-using distutils configuration files. (See `Configuration Files`_, below.)
-
-By default, any scripts included with the package are installed to the running
-Python installation's standard script installation location. However, if you
-specify an installation directory via the command line or a config file, then
-the default directory for installing scripts will be the same as the package
-installation directory, to ensure that the script will have access to the
-installed package. You can override this using the ``-s`` or ``--script-dir``
-option.
-
-Installed packages are added to an ``easy-install.pth`` file in the install
-directory, so that Python will always use the most-recently-installed version
-of the package. If you would like to be able to select which version to use at
-runtime, you should use the ``-m`` or ``--multi-version`` option.
-
-
-Upgrading a Package
--------------------
-
-You don't need to do anything special to upgrade a package: just install the
-new version, either by requesting a specific version, e.g.::
-
- easy_install "SomePackage==2.0"
-
-a version greater than the one you have now::
-
- easy_install "SomePackage>2.0"
-
-using the upgrade flag, to find the latest available version on PyPI::
-
- easy_install --upgrade SomePackage
-
-or by using a download page, direct download URL, or package filename::
-
- easy_install -f http://example.com/downloads ExamplePackage
-
- easy_install http://example.com/downloads/ExamplePackage-2.0-py2.4.egg
-
- easy_install my_downloads/ExamplePackage-2.0.tgz
-
-If you're using ``-m`` or ``--multi-version`` , using the ``require()``
-function at runtime automatically selects the newest installed version of a
-package that meets your version criteria. So, installing a newer version is
-the only step needed to upgrade such packages.
-
-If you're installing to a directory on PYTHONPATH, or a configured "site"
-directory (and not using ``-m``), installing a package automatically replaces
-any previous version in the ``easy-install.pth`` file, so that Python will
-import the most-recently installed version by default. So, again, installing
-the newer version is the only upgrade step needed.
-
-If you haven't suppressed script installation (using ``--exclude-scripts`` or
-``-x``), then the upgraded version's scripts will be installed, and they will
-be automatically patched to ``require()`` the corresponding version of the
-package, so that you can use them even if they are installed in multi-version
-mode.
-
-``easy_install`` never actually deletes packages (unless you're installing a
-package with the same name and version number as an existing package), so if
-you want to get rid of older versions of a package, please see `Uninstalling
-Packages`_, below.
-
-
-Changing the Active Version
----------------------------
-
-If you've upgraded a package, but need to revert to a previously-installed
-version, you can do so like this::
-
- easy_install PackageName==1.2.3
-
-Where ``1.2.3`` is replaced by the exact version number you wish to switch to.
-If a package matching the requested name and version is not already installed
-in a directory on ``sys.path``, it will be located via PyPI and installed.
-
-If you'd like to switch to the latest installed version of ``PackageName``, you
-can do so like this::
-
- easy_install PackageName
-
-This will activate the latest installed version. (Note: if you have set any
-``find_links`` via distutils configuration files, those download pages will be
-checked for the latest available version of the package, and it will be
-downloaded and installed if it is newer than your current version.)
-
-Note that changing the active version of a package will install the newly
-active version's scripts, unless the ``--exclude-scripts`` or ``-x`` option is
-specified.
-
-
-Uninstalling Packages
----------------------
-
-If you have replaced a package with another version, then you can just delete
-the package(s) you don't need by deleting the PackageName-versioninfo.egg file
-or directory (found in the installation directory).
-
-If you want to delete the currently installed version of a package (or all
-versions of a package), you should first run::
-
- easy_install -m PackageName
-
-This will ensure that Python doesn't continue to search for a package you're
-planning to remove. After you've done this, you can safely delete the .egg
-files or directories, along with any scripts you wish to remove.
-
-
-Managing Scripts
-----------------
-
-Whenever you install, upgrade, or change versions of a package, EasyInstall
-automatically installs the scripts for the selected package version, unless
-you tell it not to with ``-x`` or ``--exclude-scripts``. If any scripts in
-the script directory have the same name, they are overwritten.
-
-Thus, you do not normally need to manually delete scripts for older versions of
-a package, unless the newer version of the package does not include a script
-of the same name. However, if you are completely uninstalling a package, you
-may wish to manually delete its scripts.
-
-EasyInstall's default behavior means that you can normally only run scripts
-from one version of a package at a time. If you want to keep multiple versions
-of a script available, however, you can simply use the ``--multi-version`` or
-``-m`` option, and rename the scripts that EasyInstall creates. This works
-because EasyInstall installs scripts as short code stubs that ``require()`` the
-matching version of the package the script came from, so renaming the script
-has no effect on what it executes.
-
-For example, suppose you want to use two versions of the ``rst2html`` tool
-provided by the `docutils <http://docutils.sf.net/>`_ package. You might
-first install one version::
-
- easy_install -m docutils==0.3.9
-
-then rename the ``rst2html.py`` to ``r2h_039``, and install another version::
-
- easy_install -m docutils==0.3.10
-
-This will create another ``rst2html.py`` script, this one using docutils
-version 0.3.10 instead of 0.3.9. You now have two scripts, each using a
-different version of the package. (Notice that we used ``-m`` for both
-installations, so that Python won't lock us out of using anything but the most
-recently-installed version of the package.)
-
-
-Executables and Launchers
--------------------------
-
-On Unix systems, scripts are installed with as natural files with a "#!"
-header and no extension and they launch under the Python version indicated in
-the header.
-
-On Windows, there is no mechanism to "execute" files without extensions, so
-EasyInstall provides two techniques to mirror the Unix behavior. The behavior
-is indicated by the SETUPTOOLS_LAUNCHER environment variable, which may be
-"executable" (default) or "natural".
-
-Regardless of the technique used, the script(s) will be installed to a Scripts
-directory (by default in the Python installation directory). It is recommended
-for EasyInstall that you ensure this directory is in the PATH environment
-variable. The easiest way to ensure the Scripts directory is in the PATH is
-to run ``Tools\Scripts\win_add2path.py`` from the Python directory (requires
-Python 2.6 or later).
-
-Note that instead of changing your ``PATH`` to include the Python scripts
-directory, you can also retarget the installation location for scripts so they
-go on a directory that's already on the ``PATH``. For more information see
-`Command-Line Options`_ and `Configuration Files`_. During installation,
-pass command line options (such as ``--script-dir``) to
-``ez_setup.py`` to control where ``easy_install.exe`` will be installed.
-
-
-Windows Executable Launcher
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If the "executable" launcher is used, EasyInstall will create a '.exe'
-launcher of the same name beside each installed script (including
-``easy_install`` itself). These small .exe files launch the script of the
-same name using the Python version indicated in the '#!' header.
-
-This behavior is currently default. To force
-the use of executable launchers, set ``SETUPTOOLS_LAUNCHER`` to "executable".
-
-Natural Script Launcher
-~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall also supports deferring to an external launcher such as
-`pylauncher <https://bitbucket.org/pypa/pylauncher>`_ for launching scripts.
-Enable this experimental functionality by setting the
-``SETUPTOOLS_LAUNCHER`` environment variable to "natural". EasyInstall will
-then install scripts as simple
-scripts with a .pya (or .pyw) extension appended. If these extensions are
-associated with the pylauncher and listed in the PATHEXT environment variable,
-these scripts can then be invoked simply and directly just like any other
-executable. This behavior may become default in a future version.
-
-EasyInstall uses the .pya extension instead of simply
-the typical '.py' extension. This distinct extension is necessary to prevent
-Python
-from treating the scripts as importable modules (where name conflicts exist).
-Current releases of pylauncher do not yet associate with .pya files by
-default, but future versions should do so.
-
-
-Tips & Techniques
------------------
-
-Multiple Python Versions
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall installs itself under two names:
-``easy_install`` and ``easy_install-N.N``, where ``N.N`` is the Python version
-used to install it. Thus, if you install EasyInstall for both Python 3.2 and
-2.7, you can use the ``easy_install-3.2`` or ``easy_install-2.7`` scripts to
-install packages for the respective Python version.
-
-Setuptools also supplies easy_install as a runnable module which may be
-invoked using ``python -m easy_install`` for any Python with Setuptools
-installed.
-
-Restricting Downloads with ``--allow-hosts``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can use the ``--allow-hosts`` (``-H``) option to restrict what domains
-EasyInstall will look for links and downloads on. ``--allow-hosts=None``
-prevents downloading altogether. You can also use wildcards, for example
-to restrict downloading to hosts in your own intranet. See the section below
-on `Command-Line Options`_ for more details on the ``--allow-hosts`` option.
-
-By default, there are no host restrictions in effect, but you can change this
-default by editing the appropriate `configuration files`_ and adding:
-
-.. code-block:: ini
-
- [easy_install]
- allow_hosts = *.myintranet.example.com,*.python.org
-
-The above example would then allow downloads only from hosts in the
-``python.org`` and ``myintranet.example.com`` domains, unless overridden on the
-command line.
-
-
-Installing on Un-networked Machines
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Just copy the eggs or source packages you need to a directory on the target
-machine, then use the ``-f`` or ``--find-links`` option to specify that
-directory's location. For example::
-
- easy_install -H None -f somedir SomePackage
-
-will attempt to install SomePackage using only eggs and source packages found
-in ``somedir`` and disallowing all remote access. You should of course make
-sure you have all of SomePackage's dependencies available in somedir.
-
-If you have another machine of the same operating system and library versions
-(or if the packages aren't platform-specific), you can create the directory of
-eggs using a command like this::
-
- easy_install -zmaxd somedir SomePackage
-
-This will tell EasyInstall to put zipped eggs or source packages for
-SomePackage and all its dependencies into ``somedir``, without creating any
-scripts or .pth files. You can then copy the contents of ``somedir`` to the
-target machine. (``-z`` means zipped eggs, ``-m`` means multi-version, which
-prevents .pth files from being used, ``-a`` means to copy all the eggs needed,
-even if they're installed elsewhere on the machine, and ``-d`` indicates the
-directory to place the eggs in.)
-
-You can also build the eggs from local development packages that were installed
-with the ``setup.py develop`` command, by including the ``-l`` option, e.g.::
-
- easy_install -zmaxld somedir SomePackage
-
-This will use locally-available source distributions to build the eggs.
-
-
-Packaging Others' Projects As Eggs
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Need to distribute a package that isn't published in egg form? You can use
-EasyInstall to build eggs for a project. You'll want to use the ``--zip-ok``,
-``--exclude-scripts``, and possibly ``--no-deps`` options (``-z``, ``-x`` and
-``-N``, respectively). Use ``-d`` or ``--install-dir`` to specify the location
-where you'd like the eggs placed. By placing them in a directory that is
-published to the web, you can then make the eggs available for download, either
-in an intranet or to the internet at large.
-
-If someone distributes a package in the form of a single ``.py`` file, you can
-wrap it in an egg by tacking an ``#egg=name-version`` suffix on the file's URL.
-So, something like this::
-
- easy_install -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will install the package as an egg, and this::
-
- easy_install -zmaxd. \
- -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will create a ``.egg`` file in the current directory.
-
-
-Creating your own Package Index
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In addition to local directories and the Python Package Index, EasyInstall can
-find download links on most any web page whose URL is given to the ``-f``
-(``--find-links``) option. In the simplest case, you can simply have a web
-page with links to eggs or Python source packages, even an automatically
-generated directory listing (such as the Apache web server provides).
-
-If you are setting up an intranet site for package downloads, you may want to
-configure the target machines to use your download site by default, adding
-something like this to their `configuration files`_:
-
-.. code-block:: ini
-
- [easy_install]
- find_links = http://mypackages.example.com/somedir/
- http://turbogears.org/download/
- http://peak.telecommunity.com/dist/
-
-As you can see, you can list multiple URLs separated by whitespace, continuing
-on multiple lines if necessary (as long as the subsequent lines are indented).
-
-If you are more ambitious, you can also create an entirely custom package index
-or PyPI mirror. See the ``--index-url`` option under `Command-Line Options`_,
-below, and also the section on `Package Index "API"`_.
-
-
-Password-Protected Sites
-------------------------
-
-If a site you want to download from is password-protected using HTTP "Basic"
-authentication, you can specify your credentials in the URL, like so::
-
- http://some_userid:some_password@some.example.com/some_path/
-
-You can do this with both index page URLs and direct download URLs. As long
-as any HTML pages read by easy_install use *relative* links to point to the
-downloads, the same user ID and password will be used to do the downloading.
-
-Using .pypirc Credentials
--------------------------
-
-In addition to supplying credentials in the URL, ``easy_install`` will also
-honor credentials if present in the .pypirc file. Teams maintaining a private
-repository of packages may already have defined access credentials for
-uploading packages according to the distutils documentation. ``easy_install``
-will attempt to honor those if present. Refer to the distutils documentation
-for Python 2.5 or later for details on the syntax.
-
-Controlling Build Options
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall respects standard distutils `Configuration Files`_, so you can use
-them to configure build options for packages that it installs from source. For
-example, if you are on Windows using the MinGW compiler, you can configure the
-default compiler by putting something like this:
-
-.. code-block:: ini
-
- [build]
- compiler = mingw32
-
-into the appropriate distutils configuration file. In fact, since this is just
-normal distutils configuration, it will affect any builds using that config
-file, not just ones done by EasyInstall. For example, if you add those lines
-to ``distutils.cfg`` in the ``distutils`` package directory, it will be the
-default compiler for *all* packages you build. See `Configuration Files`_
-below for a list of the standard configuration file locations, and links to
-more documentation on using distutils configuration files.
-
-
-Editing and Viewing Source Packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sometimes a package's source distribution contains additional documentation,
-examples, configuration files, etc., that are not part of its actual code. If
-you want to be able to examine these files, you can use the ``--editable``
-option to EasyInstall, and EasyInstall will look for a source distribution
-or Subversion URL for the package, then download and extract it or check it out
-as a subdirectory of the ``--build-directory`` you specify. If you then wish
-to install the package after editing or configuring it, you can do so by
-rerunning EasyInstall with that directory as the target.
-
-Note that using ``--editable`` stops EasyInstall from actually building or
-installing the package; it just finds, obtains, and possibly unpacks it for
-you. This allows you to make changes to the package if necessary, and to
-either install it in development mode using ``setup.py develop`` (if the
-package uses setuptools, that is), or by running ``easy_install projectdir``
-(where ``projectdir`` is the subdirectory EasyInstall created for the
-downloaded package).
-
-In order to use ``--editable`` (``-e`` for short), you *must* also supply a
-``--build-directory`` (``-b`` for short). The project will be placed in a
-subdirectory of the build directory. The subdirectory will have the same
-name as the project itself, but in all-lowercase. If a file or directory of
-that name already exists, EasyInstall will print an error message and exit.
-
-Also, when using ``--editable``, you cannot use URLs or filenames as arguments.
-You *must* specify project names (and optional version requirements) so that
-EasyInstall knows what directory name(s) to create. If you need to force
-EasyInstall to use a particular URL or filename, you should specify it as a
-``--find-links`` item (``-f`` for short), and then also specify
-the project name, e.g.::
-
- easy_install -eb ~/projects \
- -fhttp://prdownloads.sourceforge.net/ctypes/ctypes-0.9.6.tar.gz?download \
- ctypes==0.9.6
-
-
-Dealing with Installation Conflicts
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-(NOTE: As of 0.6a11, this section is obsolete; it is retained here only so that
-people using older versions of EasyInstall can consult it. As of version
-0.6a11, installation conflicts are handled automatically without deleting the
-old or system-installed packages, and without ignoring the issue. Instead,
-eggs are automatically shifted to the front of ``sys.path`` using special
-code added to the ``easy-install.pth`` file. So, if you are using version
-0.6a11 or better of setuptools, you do not need to worry about conflicts,
-and the following issues do not apply to you.)
-
-EasyInstall installs distributions in a "managed" way, such that each
-distribution can be independently activated or deactivated on ``sys.path``.
-However, packages that were not installed by EasyInstall are "unmanaged",
-in that they usually live all in one directory and cannot be independently
-activated or deactivated.
-
-As a result, if you are using EasyInstall to upgrade an existing package, or
-to install a package with the same name as an existing package, EasyInstall
-will warn you of the conflict. (This is an improvement over ``setup.py
-install``, because the ``distutils`` just install new packages on top of old
-ones, possibly combining two unrelated packages or leaving behind modules that
-have been deleted in the newer version of the package.)
-
-EasyInstall will stop the installation if it detects a conflict
-between an existing, "unmanaged" package, and a module or package in any of
-the distributions you're installing. It will display a list of all of the
-existing files and directories that would need to be deleted for the new
-package to be able to function correctly. To proceed, you must manually
-delete these conflicting files and directories and re-run EasyInstall.
-
-Of course, once you've replaced all of your existing "unmanaged" packages with
-versions managed by EasyInstall, you won't have any more conflicts to worry
-about!
-
-
-Compressed Installation
-~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall tries to install packages in zipped form, if it can. Zipping
-packages can improve Python's overall import performance if you're not using
-the ``--multi-version`` option, because Python processes zipfile entries on
-``sys.path`` much faster than it does directories.
-
-As of version 0.5a9, EasyInstall analyzes packages to determine whether they
-can be safely installed as a zipfile, and then acts on its analysis. (Previous
-versions would not install a package as a zipfile unless you used the
-``--zip-ok`` option.)
-
-The current analysis approach is fairly conservative; it currently looks for:
-
- * Any use of the ``__file__`` or ``__path__`` variables (which should be
- replaced with ``pkg_resources`` API calls)
-
- * Possible use of ``inspect`` functions that expect to manipulate source files
- (e.g. ``inspect.getsource()``)
-
- * Top-level modules that might be scripts used with ``python -m`` (Python 2.4)
-
-If any of the above are found in the package being installed, EasyInstall will
-assume that the package cannot be safely run from a zipfile, and unzip it to
-a directory instead. You can override this analysis with the ``--zip-ok`` flag,
-which will tell EasyInstall to install the package as a zipfile anyway. Or,
-you can use the ``--always-unzip`` flag, in which case EasyInstall will always
-unzip, even if its analysis says the package is safe to run as a zipfile.
-
-Normally, however, it is simplest to let EasyInstall handle the determination
-of whether to zip or unzip, and only specify overrides when needed to work
-around a problem. If you find you need to override EasyInstall's guesses, you
-may want to contact the package author and the EasyInstall maintainers, so that
-they can make appropriate changes in future versions.
-
-(Note: If a package uses ``setuptools`` in its setup script, the package author
-has the option to declare the package safe or unsafe for zipped usage via the
-``zip_safe`` argument to ``setup()``. If the package author makes such a
-declaration, EasyInstall believes the package's author and does not perform its
-own analysis. However, your command-line option, if any, will still override
-the package author's choice.)
-
-
-Reference Manual
-================
-
-Configuration Files
--------------------
-
-(New in 0.4a2)
-
-You may specify default options for EasyInstall using the standard
-distutils configuration files, under the command heading ``easy_install``.
-EasyInstall will look first for a ``setup.cfg`` file in the current directory,
-then a ``~/.pydistutils.cfg`` or ``$HOME\\pydistutils.cfg`` (on Unix-like OSes
-and Windows, respectively), and finally a ``distutils.cfg`` file in the
-``distutils`` package directory. Here's a simple example:
-
-.. code-block:: ini
-
- [easy_install]
-
- # set the default location to install packages
- install_dir = /home/me/lib/python
-
- # Notice that indentation can be used to continue an option
- # value; this is especially useful for the "--find-links"
- # option, which tells easy_install to use download links on
- # these pages before consulting PyPI:
- #
- find_links = http://sqlobject.org/
- http://peak.telecommunity.com/dist/
-
-In addition to accepting configuration for its own options under
-``[easy_install]``, EasyInstall also respects defaults specified for other
-distutils commands. For example, if you don't set an ``install_dir`` for
-``[easy_install]``, but *have* set an ``install_lib`` for the ``[install]``
-command, this will become EasyInstall's default installation directory. Thus,
-if you are already using distutils configuration files to set default install
-locations, build options, etc., EasyInstall will respect your existing settings
-until and unless you override them explicitly in an ``[easy_install]`` section.
-
-For more information, see also the current Python documentation on the `use and
-location of distutils configuration files <http://docs.python.org/inst/config-syntax.html>`_.
-
-Notice that ``easy_install`` will use the ``setup.cfg`` from the current
-working directory only if it was triggered from ``setup.py`` through the
-``install_requires`` option. The standalone command will not use that file.
-
-Command-Line Options
---------------------
-
-``--zip-ok, -z``
- Install all packages as zip files, even if they are marked as unsafe for
- running as a zipfile. This can be useful when EasyInstall's analysis
- of a non-setuptools package is too conservative, but keep in mind that
- the package may not work correctly. (Changed in 0.5a9; previously this
- option was required in order for zipped installation to happen at all.)
-
-``--always-unzip, -Z``
- Don't install any packages as zip files, even if the packages are marked
- as safe for running as a zipfile. This can be useful if a package does
- something unsafe, but not in a way that EasyInstall can easily detect.
- EasyInstall's default analysis is currently very conservative, however, so
- you should only use this option if you've had problems with a particular
- package, and *after* reporting the problem to the package's maintainer and
- to the EasyInstall maintainers.
-
- (Note: the ``-z/-Z`` options only affect the installation of newly-built
- or downloaded packages that are not already installed in the target
- directory; if you want to convert an existing installed version from
- zipped to unzipped or vice versa, you'll need to delete the existing
- version first, and re-run EasyInstall.)
-
-``--multi-version, -m``
- "Multi-version" mode. Specifying this option prevents ``easy_install`` from
- adding an ``easy-install.pth`` entry for the package being installed, and
- if an entry for any version the package already exists, it will be removed
- upon successful installation. In multi-version mode, no specific version of
- the package is available for importing, unless you use
- ``pkg_resources.require()`` to put it on ``sys.path``. This can be as
- simple as::
-
- from pkg_resources import require
- require("SomePackage", "OtherPackage", "MyPackage")
-
- which will put the latest installed version of the specified packages on
- ``sys.path`` for you. (For more advanced uses, like selecting specific
- versions and enabling optional dependencies, see the ``pkg_resources`` API
- doc.)
-
- Changed in 0.6a10: this option is no longer silently enabled when
- installing to a non-PYTHONPATH, non-"site" directory. You must always
- explicitly use this option if you want it to be active.
-
-``--upgrade, -U`` (New in 0.5a4)
- By default, EasyInstall only searches online if a project/version
- requirement can't be met by distributions already installed
- on sys.path or the installation directory. However, if you supply the
- ``--upgrade`` or ``-U`` flag, EasyInstall will always check the package
- index and ``--find-links`` URLs before selecting a version to install. In
- this way, you can force EasyInstall to use the latest available version of
- any package it installs (subject to any version requirements that might
- exclude such later versions).
-
-``--install-dir=DIR, -d DIR``
- Set the installation directory. It is up to you to ensure that this
- directory is on ``sys.path`` at runtime, and to use
- ``pkg_resources.require()`` to enable the installed package(s) that you
- need.
-
- (New in 0.4a2) If this option is not directly specified on the command line
- or in a distutils configuration file, the distutils default installation
- location is used. Normally, this would be the ``site-packages`` directory,
- but if you are using distutils configuration files, setting things like
- ``prefix`` or ``install_lib``, then those settings are taken into
- account when computing the default installation directory, as is the
- ``--prefix`` option.
-
-``--script-dir=DIR, -s DIR``
- Set the script installation directory. If you don't supply this option
- (via the command line or a configuration file), but you *have* supplied
- an ``--install-dir`` (via command line or config file), then this option
- defaults to the same directory, so that the scripts will be able to find
- their associated package installation. Otherwise, this setting defaults
- to the location where the distutils would normally install scripts, taking
- any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
- Don't install scripts. This is useful if you need to install multiple
- versions of a package, but do not want to reset the version that will be
- run by scripts that are already installed.
-
-``--user`` (New in 0.6.11)
- Use the user-site-packages as specified in :pep:`370`
- instead of the global site-packages.
-
-``--always-copy, -a`` (New in 0.5a4)
- Copy all needed distributions to the installation directory, even if they
- are already present in a directory on sys.path. In older versions of
- EasyInstall, this was the default behavior, but now you must explicitly
- request it. By default, EasyInstall will no longer copy such distributions
- from other sys.path directories to the installation directory, unless you
- explicitly gave the distribution's filename on the command line.
-
- Note that as of 0.6a10, using this option excludes "system" and
- "development" eggs from consideration because they can't be reliably
- copied. This may cause EasyInstall to choose an older version of a package
- than what you expected, or it may cause downloading and installation of a
- fresh copy of something that's already installed. You will see warning
- messages for any eggs that EasyInstall skips, before it falls back to an
- older version or attempts to download a fresh copy.
-
-``--find-links=URLS_OR_FILENAMES, -f URLS_OR_FILENAMES``
- Scan the specified "download pages" or directories for direct links to eggs
- or other distributions. Any existing file or directory names or direct
- download URLs are immediately added to EasyInstall's search cache, and any
- indirect URLs (ones that don't point to eggs or other recognized archive
- formats) are added to a list of additional places to search for download
- links. As soon as EasyInstall has to go online to find a package (either
- because it doesn't exist locally, or because ``--upgrade`` or ``-U`` was
- used), the specified URLs will be downloaded and scanned for additional
- direct links.
-
- Eggs and archives found by way of ``--find-links`` are only downloaded if
- they are needed to meet a requirement specified on the command line; links
- to unneeded packages are ignored.
-
- If all requested packages can be found using links on the specified
- download pages, the Python Package Index will not be consulted unless you
- also specified the ``--upgrade`` or ``-U`` option.
-
- (Note: if you want to refer to a local HTML file containing links, you must
- use a ``file:`` URL, as filenames that do not refer to a directory, egg, or
- archive are ignored.)
-
- You may specify multiple URLs or file/directory names with this option,
- separated by whitespace. Note that on the command line, you will probably
- have to surround the URL list with quotes, so that it is recognized as a
- single option value. You can also specify URLs in a configuration file;
- see `Configuration Files`_, above.
-
- Changed in 0.6a10: previously all URLs and directories passed to this
- option were scanned as early as possible, but from 0.6a10 on, only
- directories and direct archive links are scanned immediately; URLs are not
- retrieved unless a package search was already going to go online due to a
- package not being available locally, or due to the use of the ``--upgrade``
- or ``-U`` option.
-
-``--no-find-links`` Blocks the addition of any link.
- This parameter is useful if you want to avoid adding links defined in a
- project easy_install is installing (whether it's a requested project or a
- dependency). When used, ``--find-links`` is ignored.
-
- Added in Distribute 0.6.11 and Setuptools 0.7.
-
-``--index-url=URL, -i URL`` (New in 0.4a1; default changed in 0.6c7)
- Specifies the base URL of the Python Package Index. The default is
- https://pypi.python.org/simple if not specified. When a package is requested
- that is not locally available or linked from a ``--find-links`` download
- page, the package index will be searched for download pages for the needed
- package, and those download pages will be searched for links to download
- an egg or source distribution.
-
-``--editable, -e`` (New in 0.6a1)
- Only find and download source distributions for the specified projects,
- unpacking them to subdirectories of the specified ``--build-directory``.
- EasyInstall will not actually build or install the requested projects or
- their dependencies; it will just find and extract them for you. See
- `Editing and Viewing Source Packages`_ above for more details.
-
-``--build-directory=DIR, -b DIR`` (UPDATED in 0.6a1)
- Set the directory used to build source packages. If a package is built
- from a source distribution or checkout, it will be extracted to a
- subdirectory of the specified directory. The subdirectory will have the
- same name as the extracted distribution's project, but in all-lowercase.
- If a file or directory of that name already exists in the given directory,
- a warning will be printed to the console, and the build will take place in
- a temporary directory instead.
-
- This option is most useful in combination with the ``--editable`` option,
- which forces EasyInstall to *only* find and extract (but not build and
- install) source distributions. See `Editing and Viewing Source Packages`_,
- above, for more information.
-
-``--verbose, -v, --quiet, -q`` (New in 0.4a4)
- Control the level of detail of EasyInstall's progress messages. The
- default detail level is "info", which prints information only about
- relatively time-consuming operations like running a setup script, unpacking
- an archive, or retrieving a URL. Using ``-q`` or ``--quiet`` drops the
- detail level to "warn", which will only display installation reports,
- warnings, and errors. Using ``-v`` or ``--verbose`` increases the detail
- level to include individual file-level operations, link analysis messages,
- and distutils messages from any setup scripts that get run. If you include
- the ``-v`` option more than once, the second and subsequent uses are passed
- down to any setup scripts, increasing the verbosity of their reporting as
- well.
-
-``--dry-run, -n`` (New in 0.4a4)
- Don't actually install the package or scripts. This option is passed down
- to any setup scripts run, so packages should not actually build either.
- This does *not* skip downloading, nor does it skip extracting source
- distributions to a temporary/build directory.
-
-``--optimize=LEVEL``, ``-O LEVEL`` (New in 0.4a4)
- If you are installing from a source distribution, and are *not* using the
- ``--zip-ok`` option, this option controls the optimization level for
- compiling installed ``.py`` files to ``.pyo`` files. It does not affect
- the compilation of modules contained in ``.egg`` files, only those in
- ``.egg`` directories. The optimization level can be set to 0, 1, or 2;
- the default is 0 (unless it's set under ``install`` or ``install_lib`` in
- one of your distutils configuration files).
-
-``--record=FILENAME`` (New in 0.5a4)
- Write a record of all installed files to FILENAME. This is basically the
- same as the same option for the standard distutils "install" command, and
- is included for compatibility with tools that expect to pass this option
- to "setup.py install".
-
-``--site-dirs=DIRLIST, -S DIRLIST`` (New in 0.6a1)
- Specify one or more custom "site" directories (separated by commas).
- "Site" directories are directories where ``.pth`` files are processed, such
- as the main Python ``site-packages`` directory. As of 0.6a10, EasyInstall
- automatically detects whether a given directory processes ``.pth`` files
- (or can be made to do so), so you should not normally need to use this
- option. It is now only necessary if you want to override EasyInstall's
- judgment and force an installation directory to be treated as if it
- supported ``.pth`` files.
-
-``--no-deps, -N`` (New in 0.6a6)
- Don't install any dependencies. This is intended as a convenience for
- tools that wrap eggs in a platform-specific packaging system. (We don't
- recommend that you use it for anything else.)
-
-``--allow-hosts=PATTERNS, -H PATTERNS`` (New in 0.6a6)
- Restrict downloading and spidering to hosts matching the specified glob
- patterns. E.g. ``-H *.python.org`` restricts web access so that only
- packages listed on and downloadable from machines in the ``python.org``
- domain may be used. The glob patterns must match the *entire* user/host/port
- section of
- the target URL(s). For example, ``*.python.org`` will NOT accept a URL
- like ``http://python.org/foo`` or ``http://www.python.org:8080/``.
- Multiple patterns can be specified by separating them with commas. The
- default pattern is ``*``, which matches anything.
-
- In general, this option is mainly useful for blocking EasyInstall's web
- access altogether (e.g. ``-Hlocalhost``), or to restrict it to an intranet
- or other trusted site. EasyInstall will do the best it can to satisfy
- dependencies given your host restrictions, but of course can fail if it
- can't find suitable packages. EasyInstall displays all blocked URLs, so
- that you can adjust your ``--allow-hosts`` setting if it is more strict
- than you intended. Some sites may wish to define a restrictive default
- setting for this option in their `configuration files`_, and then manually
- override the setting on the command line as needed.
-
-``--prefix=DIR`` (New in 0.6a10)
- Use the specified directory as a base for computing the default
- installation and script directories. On Windows, the resulting default
- directories will be ``prefix\\Lib\\site-packages`` and ``prefix\\Scripts``,
- while on other platforms the defaults will be
- ``prefix/lib/python2.X/site-packages`` (with the appropriate version
- substituted) for libraries and ``prefix/bin`` for scripts.
-
- Note that the ``--prefix`` option only sets the *default* installation and
- script directories, and does not override the ones set on the command line
- or in a configuration file.
-
-``--local-snapshots-ok, -l`` (New in 0.6c6)
- Normally, EasyInstall prefers to only install *released* versions of
- projects, not in-development ones, because such projects may not
- have a currently-valid version number. So, it usually only installs them
- when their ``setup.py`` directory is explicitly passed on the command line.
-
- However, if this option is used, then any in-development projects that were
- installed using the ``setup.py develop`` command, will be used to build
- eggs, effectively upgrading the "in-development" project to a snapshot
- release. Normally, this option is used only in conjunction with the
- ``--always-copy`` option to create a distributable snapshot of every egg
- needed to run an application.
-
- Note that if you use this option, you must make sure that there is a valid
- version number (such as an SVN revision number tag) for any in-development
- projects that may be used, as otherwise EasyInstall may not be able to tell
- what version of the project is "newer" when future installations or
- upgrades are attempted.
-
-
-.. _non-root installation:
-
-Custom Installation Locations
------------------------------
-
-By default, EasyInstall installs python packages into Python's main ``site-packages`` directory,
-and manages them using a custom ``.pth`` file in that same directory.
-
-Very often though, a user or developer wants ``easy_install`` to install and manage python packages
-in an alternative location, usually for one of 3 reasons:
-
-1. They don't have access to write to the main Python site-packages directory.
-
-2. They want a user-specific stash of packages, that is not visible to other users.
-
-3. They want to isolate a set of packages to a specific python application, usually to minimize
- the possibility of version conflicts.
-
-Historically, there have been many approaches to achieve custom installation.
-The following section lists only the easiest and most relevant approaches [1]_.
-
-`Use the "--user" option`_
-
-`Use the "--user" option and customize "PYTHONUSERBASE"`_
-
-`Use "virtualenv"`_
-
-.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_ in Python 2.6.
-
-.. _PEP-370: http://www.python.org/dev/peps/pep-0370/
-
-
-Use the "--user" option
-~~~~~~~~~~~~~~~~~~~~~~~
-With Python 2.6 came the User scheme for installation, which means that all
-python distributions support an alternative install location that is specific to a user [2]_ [3]_.
-The Default location for each OS is explained in the python documentation
-for the ``site.USER_BASE`` variable. This mode of installation can be turned on by
-specifying the ``--user`` option to ``setup.py install`` or ``easy_install``.
-This approach serves the need to have a user-specific stash of packages.
-
-.. [2] Prior to Python2.6, Mac OS X offered a form of the User scheme. That is now subsumed into the User scheme introduced in Python 2.6.
-.. [3] Prior to the User scheme, there was the Home scheme, which is still available, but requires more effort than the User scheme to get packages recognized.
-
-Use the "--user" option and customize "PYTHONUSERBASE"
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The User scheme install location can be customized by setting the ``PYTHONUSERBASE`` environment
-variable, which updates the value of ``site.USER_BASE``. To isolate packages to a specific
-application, simply set the OS environment of that application to a specific value of
-``PYTHONUSERBASE``, that contains just those packages.
-
-Use "virtualenv"
-~~~~~~~~~~~~~~~~
-"virtualenv" is a 3rd-party python package that effectively "clones" a python installation, thereby
-creating an isolated location to install packages. The evolution of "virtualenv" started before the existence
-of the User installation scheme. "virtualenv" provides a version of ``easy_install`` that is
-scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
-that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
-
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: https://pypi.python.org/pypi/virtualenv
-
-
-
-Package Index "API"
--------------------
-
-Custom package indexes (and PyPI) must follow the following rules for
-EasyInstall to be able to look up and download packages:
-
-1. Except where stated otherwise, "pages" are HTML or XHTML, and "links"
- refer to ``href`` attributes.
-
-2. Individual project version pages' URLs must be of the form
- ``base/projectname/version``, where ``base`` is the package index's base URL.
-
-3. Omitting the ``/version`` part of a project page's URL (but keeping the
- trailing ``/``) should result in a page that is either:
-
- a) The single active version of that project, as though the version had been
- explicitly included, OR
-
- b) A page with links to all of the active version pages for that project.
-
-4. Individual project version pages should contain direct links to downloadable
- distributions where possible. It is explicitly permitted for a project's
- "long_description" to include URLs, and these should be formatted as HTML
- links by the package index, as EasyInstall does no special processing to
- identify what parts of a page are index-specific and which are part of the
- project's supplied description.
-
-5. Where available, MD5 information should be added to download URLs by
- appending a fragment identifier of the form ``#md5=...``, where ``...`` is
- the 32-character hex MD5 digest. EasyInstall will verify that the
- downloaded file's MD5 digest matches the given value.
-
-6. Individual project version pages should identify any "homepage" or
- "download" URLs using ``rel="homepage"`` and ``rel="download"`` attributes
- on the HTML elements linking to those URLs. Use of these attributes will
- cause EasyInstall to always follow the provided links, unless it can be
- determined by inspection that they are downloadable distributions. If the
- links are not to downloadable distributions, they are retrieved, and if they
- are HTML, they are scanned for download links. They are *not* scanned for
- additional "homepage" or "download" links, as these are only processed for
- pages that are part of a package index site.
-
-7. The root URL of the index, if retrieved with a trailing ``/``, must result
- in a page containing links to *all* projects' active version pages.
-
- (Note: This requirement is a workaround for the absence of case-insensitive
- ``safe_name()`` matching of project names in URL paths. If project names are
- matched in this fashion (e.g. via the PyPI server, mod_rewrite, or a similar
- mechanism), then it is not necessary to include this all-packages listing
- page.)
-
-8. If a package index is accessed via a ``file://`` URL, then EasyInstall will
- automatically use ``index.html`` files, if present, when trying to read a
- directory with a trailing ``/`` on the URL.
-
-
-Backward Compatibility
-~~~~~~~~~~~~~~~~~~~~~~
-
-Package indexes that wish to support setuptools versions prior to 0.6b4 should
-also follow these rules:
-
-* Homepage and download links must be preceded with ``"<th>Home Page"`` or
- ``"<th>Download URL"``, in addition to (or instead of) the ``rel=""``
- attributes on the actual links. These marker strings do not need to be
- visible, or uncommented, however! For example, the following is a valid
- homepage link that will work with any version of setuptools::
-
- <li>
- <strong>Home Page:</strong>
- <!-- <th>Home Page -->
- <a rel="homepage" href="http://sqlobject.org">http://sqlobject.org</a>
- </li>
-
- Even though the marker string is in an HTML comment, older versions of
- EasyInstall will still "see" it and know that the link that follows is the
- project's home page URL.
-
-* The pages described by paragraph 3(b) of the preceding section *must*
- contain the string ``"Index of Packages</title>"`` somewhere in their text.
- This can be inside of an HTML comment, if desired, and it can be anywhere
- in the page. (Note: this string MUST NOT appear on normal project pages, as
- described in paragraphs 2 and 3(a)!)
-
-In addition, for compatibility with PyPI versions that do not use ``#md5=``
-fragment IDs, EasyInstall uses the following regular expression to match PyPI's
-displayed MD5 info (broken onto two lines for readability)::
-
- <a href="([^"#]+)">([^<]+)</a>\n\s+\(<a href="[^?]+\?:action=show_md5
- &amp;digest=([0-9a-f]{32})">md5</a>\)
-
-History
-=======
-
-0.6c9
- * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
- is flattened out in the resulting egg. (There was a case-sensitivity
- problem that affected some distributions, notably ``pywin32``.)
-
- * Prevent ``--help-commands`` and other junk from showing under Python 2.5
- when running ``easy_install --help``.
-
- * Fixed GUI scripts sometimes not executing on Windows
-
- * Fixed not picking up dependency links from recursive dependencies.
-
- * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
-
- * Changes for Jython compatibility
-
- * Improved error message when a requirement is also a directory name, but the
- specified directory is not a source package.
-
- * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
-
- * Fixed HTTP SVN detection failing when the page title included a project
- name (e.g. on SourceForge-hosted SVN)
-
- * Fix Jython script installation to handle ``#!`` lines better when
- ``sys.executable`` is a script.
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Keep site directories (e.g. ``site-packages``) from being included in
- ``.pth`` files.
-
-0.6c7
- * ``ftp:`` download URLs now work correctly.
-
- * The default ``--index-url`` is now ``https://pypi.python.org/simple``, to use
- the Python Package Index's new simpler (and faster!) REST API.
-
-0.6c6
- * EasyInstall no longer aborts the installation process if a URL it wants to
- retrieve can't be downloaded, unless the URL is an actual package download.
- Instead, it issues a warning and tries to keep going.
-
- * Fixed distutils-style scripts originally built on Windows having their line
- endings doubled when installed on any platform.
-
- * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
- installed using ``setup.py develop``.
-
- * Fixed not HTML-decoding URLs scraped from web pages
-
-0.6c5
- * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
- is installed unzipped.
-
-0.6c4
- * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
- URLs. If a password-protected page contains links to the same host (and
- protocol), those links will inherit the credentials used to access the
- original page.
-
- * Removed all special support for Sourceforge mirrors, as Sourceforge's
- mirror system now works well for non-browser downloads.
-
- * Fixed not recognizing ``win32.exe`` installers that included a custom
- bitmap.
-
- * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
- are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
- is done by ``os.urandom()`` on some platforms).
-
- * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
- has a space in it (e.g., the user installed Python to a ``Program Files``
- directory).
-
-0.6c3
- * You can once again use "python -m easy_install" with Python 2.4 and above.
-
- * Python 2.5 compatibility fixes added.
-
-0.6c2
- * Windows script wrappers now support quoted arguments and arguments
- containing spaces. (Patch contributed by Jim Fulton.)
-
- * The ``ez_setup.py`` script now actually works when you put a setuptools
- ``.egg`` alongside it for bootstrapping an offline machine.
-
- * A writable installation directory on ``sys.path`` is no longer required to
- download and extract a source distribution using ``--editable``.
-
- * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
- contains non-ASCII characters, to prevent deprecation warnings about an
- unspecified encoding when the script is run.
-
-0.6c1
- * EasyInstall now includes setuptools version information in the
- ``User-Agent`` string sent to websites it visits.
-
-0.6b4
- * Fix creating Python wrappers for non-Python scripts
-
- * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
- "Home page" or "Download URL" slots on PyPI.
-
- * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
- or directory is deleted/overwritten.
-
- * Fix not recognizing HTML 404 pages from package indexes.
-
- * Allow ``file://`` URLs to be used as a package index. URLs that refer to
- directories will use an internally-generated directory listing if there is
- no ``index.html`` file in the directory.
-
- * Allow external links in a package index to be specified using
- ``rel="homepage"`` or ``rel="download"``, without needing the old
- PyPI-specific visible markup.
-
- * Suppressed warning message about possibly-misspelled project name, if an egg
- or link for that project name has already been seen.
-
-0.6b3
- * Fix local ``--find-links`` eggs not being copied except with
- ``--always-copy``.
-
- * Fix sometimes not detecting local packages installed outside of "site"
- directories.
-
- * Fix mysterious errors during initial ``setuptools`` install, caused by
- ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
- after deleting the egg from which it's running.
-
-0.6b2
- * Don't install or update a ``site.py`` patch when installing to a
- ``PYTHONPATH`` directory with ``--multi-version``, unless an
- ``easy-install.pth`` file is already in use there.
-
- * Construct ``.pth`` file paths in such a way that installing an egg whose
- name begins with ``import`` doesn't cause a syntax error.
-
- * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
-
-0.6b1
- * Better ambiguity management: accept ``#egg`` name/version even if processing
- what appears to be a correctly-named distutils file, and ignore ``.egg``
- files with no ``-``, since valid Python ``.egg`` files always have a version
- number (but Scheme eggs often don't).
-
- * Support ``file://`` links to directories in ``--find-links``, so that
- easy_install can build packages from local source checkouts.
-
- * Added automatic retry for Sourceforge mirrors. The new download process is
- to first just try dl.sourceforge.net, then randomly select mirror IPs and
- remove ones that fail, until something works. The removed IPs stay removed
- for the remainder of the run.
-
- * Ignore bdist_dumb distributions when looking at download URLs.
-
-0.6a11
- * Process ``dependency_links.txt`` if found in a distribution, by adding the
- URLs to the list for scanning.
-
- * Use relative paths in ``.pth`` files when eggs are being installed to the
- same directory as the ``.pth`` file. This maximizes portability of the
- target directory when building applications that contain eggs.
-
- * Added ``easy_install-N.N`` script(s) for convenience when using multiple
- Python versions.
-
- * Added automatic handling of installation conflicts. Eggs are now shifted to
- the front of sys.path, in an order consistent with where they came from,
- making EasyInstall seamlessly co-operate with system package managers.
-
- The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
- are now no longer necessary, and will generate warnings at the end of a
- run if you use them.
-
- * Don't recursively traverse subdirectories given to ``--find-links``.
-
-0.6a10
- * Added exhaustive testing of the install directory, including a spawn test
- for ``.pth`` file support, and directory writability/existence checks. This
- should virtually eliminate the need to set or configure ``--site-dirs``.
-
- * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
- RTFM-ing. :)
-
- * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
- manually to make it work. ``--multi-version`` is no longer a silent
- default; you must explicitly use it if installing to a non-PYTHONPATH,
- non-"site" directory.
-
- * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
- ``--install-dir``, and ``--script-dir`` options, whether on the command line
- or in configuration files.
-
- * Improved SourceForge mirror processing to work faster and be less affected
- by transient HTML changes made by SourceForge.
-
- * PyPI searches now use the exact spelling of requirements specified on the
- command line or in a project's ``install_requires``. Previously, a
- normalized form of the name was used, which could lead to unnecessary
- full-index searches when a project's name had an underscore (``_``) in it.
-
- * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
- as long as you include an ``#egg=name-version`` suffix on the URL, or if
- the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
- This allows third parties to "package" trivial Python modules just by
- linking to them (e.g. from within their own PyPI page or download links
- page).
-
- * The ``--always-copy`` option now skips "system" and "development" eggs since
- they can't be reliably copied. Note that this may cause EasyInstall to
- choose an older version of a package than what you expected, or it may cause
- downloading and installation of a fresh version of what's already installed.
-
- * The ``--find-links`` option previously scanned all supplied URLs and
- directories as early as possible, but now only directories and direct
- archive links are scanned immediately. URLs are not retrieved unless a
- package search was already going to go online due to a package not being
- available locally, or due to the use of the ``--update`` or ``-U`` option.
-
- * Fixed the annoying ``--help-commands`` wart.
-
-0.6a9
- * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
- "baskets") when they weren't explicitly listed in the ``.pth`` file.
-
- * If more than one URL appears to describe the exact same distribution, prefer
- the shortest one. This helps to avoid "table of contents" CGI URLs like the
- ones on effbot.org.
-
- * Quote arguments to python.exe (including python's path) to avoid problems
- when Python (or a script) is installed in a directory whose name contains
- spaces on Windows.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
- format. Running ``bdist_wininst`` on a setuptools-based package wraps the
- egg in an .exe that will safely install it as an egg (i.e., with metadata
- and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
- back into an ``.egg`` file or directory and install it as such.
-
-0.6a8
- * Update for changed SourceForge mirror format
-
- * Fixed not installing dependencies for some packages fetched via Subversion
-
- * Fixed dependency installation with ``--always-copy`` not using the same
- dependency resolution procedure as other operations.
-
- * Fixed not fully removing temporary directories on Windows, if a Subversion
- checkout left read-only files behind
-
- * Fixed some problems building extensions when Pyrex was installed, especially
- with Python 2.4 and/or packages using SWIG.
-
-0.6a7
- * Fixed not being able to install Windows script wrappers using Python 2.3
-
-0.6a6
- * Added support for "traditional" PYTHONPATH-based non-root installation, and
- also the convenient ``virtual-python.py`` script, based on a contribution
- by Ian Bicking. The setuptools egg now contains a hacked ``site`` module
- that makes the PYTHONPATH-based approach work with .pth files, so that you
- can get the full EasyInstall feature set on such installations.
-
- * Added ``--no-deps`` and ``--allow-hosts`` options.
-
- * Improved Windows ``.exe`` script wrappers so that the script can have the
- same name as a module without confusing Python.
-
- * Changed dependency processing so that it's breadth-first, allowing a
- depender's preferences to override those of a dependee, to prevent conflicts
- when a lower version is acceptable to the dependee, but not the depender.
- Also, ensure that currently installed/selected packages aren't given
- precedence over ones desired by a package being installed, which could
- cause conflict errors.
-
-0.6a3
- * Improved error message when trying to use old ways of running
- ``easy_install``. Removed the ability to run via ``python -m`` or by
- running ``easy_install.py``; ``easy_install`` is the command to run on all
- supported platforms.
-
- * Improved wrapper script generation and runtime initialization so that a
- VersionConflict doesn't occur if you later install a competing version of a
- needed package as the default version of that package.
-
- * Fixed a problem parsing version numbers in ``#egg=`` links.
-
-0.6a2
- * EasyInstall can now install "console_scripts" defined by packages that use
- ``setuptools`` and define appropriate entry points. On Windows, console
- scripts get an ``.exe`` wrapper so you can just type their name. On other
- platforms, the scripts are installed without a file extension.
-
- * Using ``python -m easy_install`` or running ``easy_install.py`` is now
- DEPRECATED, since an ``easy_install`` wrapper is now available on all
- platforms.
-
-0.6a1
- * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
- that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
-
- * EasyInstall now handles symlinks in target directories by removing the link,
- rather than attempting to overwrite the link's destination. This makes it
- easier to set up an alternate Python "home" directory (as described above in
- the `Non-Root Installation`_ section).
-
- * Added support for handling MacOS platform information in ``.egg`` filenames,
- based on a contribution by Kevin Dangoor. You may wish to delete and
- reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
- because the format for this platform information has changed so that minor
- OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
- previous OS version to become obsolete.
-
- * easy_install's dependency processing algorithms have changed. When using
- ``--always-copy``, it now ensures that dependencies are copied too. When
- not using ``--always-copy``, it tries to use a single resolution loop,
- rather than recursing.
-
- * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
- extensions.
-
- * Added ``--site-dirs`` option to allow adding custom "site" directories.
- Made ``easy-install.pth`` work in platform-specific alternate site
- directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
-
- * If you manually delete the current version of a package, the next run of
- EasyInstall against the target directory will now remove the stray entry
- from the ``easy-install.pth`` file.
-
- * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
- as pointing to the named project's source checkout. Such URLs have a lower
- match precedence than any other kind of distribution, so they'll only be
- used if they have a higher version number than any other available
- distribution, or if you use the ``--editable`` option. The ``#egg``
- fragment can contain a version if it's formatted as ``#egg=proj-ver``,
- where ``proj`` is the project name, and ``ver`` is the version number. You
- *must* use the format for these values that the ``bdist_egg`` command uses;
- i.e., all non-alphanumeric runs must be condensed to single underscore
- characters.
-
- * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_
- above for more info. Also, slightly changed the behavior of the
- ``--build-directory`` option.
-
- * Fixed the setup script sandbox facility not recognizing certain paths as
- valid on case-insensitive platforms.
-
-0.5a12
- * Fix ``python -m easy_install`` not working due to setuptools being installed
- as a zipfile. Update safety scanner to check for modules that might be used
- as ``python -m`` scripts.
-
- * Misc. fixes for win32.exe support, including changes to support Python 2.4's
- changed ``bdist_wininst`` format.
-
-0.5a10
- * Put the ``easy_install`` module back in as a module, as it's needed for
- ``python -m`` to run it!
-
- * Allow ``--find-links/-f`` to accept local directories or filenames as well
- as URLs.
-
-0.5a9
- * EasyInstall now automatically detects when an "unmanaged" package or
- module is going to be on ``sys.path`` ahead of a package you're installing,
- thereby preventing the newer version from being imported. By default, it
- will abort installation to alert you of the problem, but there are also
- new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
- available to change the default behavior. (Note: this new feature doesn't
- take effect for egg files that were built with older ``setuptools``
- versions, because they lack the new metadata file required to implement it.)
-
- * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
- base error type for errors that should just issue a message to stderr and
- exit the program without a traceback.
-
- * EasyInstall can now be given a path to a directory containing a setup
- script, and it will attempt to build and install the package there.
-
- * EasyInstall now performs a safety analysis on module contents to determine
- whether a package is likely to run in zipped form, and displays
- information about what modules may be doing introspection that would break
- when running as a zipfile.
-
- * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
- would ordinarily be considered safe to unzip, and changed the meaning of
- ``--zip-ok/-z`` to "always leave everything zipped".
-
-0.5a8
- * There is now a separate documentation page for `setuptools`_; revision
- history that's not specific to EasyInstall has been moved to that page.
-
- .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-
-0.5a5
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
- the ``easy_install`` module to ``setuptools.command.easy_install``. Note
- that if you were importing or extending it, you must now change your imports
- accordingly. ``easy_install.py`` is still installed as a script, but not as
- a module.
-
-0.5a4
- * Added ``--always-copy/-a`` option to always copy needed packages to the
- installation directory, even if they're already present elsewhere on
- sys.path. (In previous versions, this was the default behavior, but now
- you must request it.)
-
- * Added ``--upgrade/-U`` option to force checking PyPI for latest available
- version(s) of all packages requested by name and version, even if a matching
- version is available locally.
-
- * Added automatic installation of dependencies declared by a distribution
- being installed. These dependencies must be listed in the distribution's
- ``EGG-INFO`` directory, so the distribution has to have declared its
- dependencies by using setuptools. If a package has requirements it didn't
- declare, you'll still have to deal with them yourself. (E.g., by asking
- EasyInstall to find and install them.)
-
- * Added the ``--record`` option to ``easy_install`` for the benefit of tools
-   that run ``setup.py install --record=filename`` on behalf of another
-   packaging system.
-
-0.5a3
- * Fixed not setting script permissions to allow execution.
-
- * Improved sandboxing so that setup scripts that want a temporary directory
- (e.g. pychecker) can still run in the sandbox.
-
-0.5a2
- * Fix stupid stupid refactoring-at-the-last-minute typos. :(
-
-0.5a1
- * Added support for converting ``.win32.exe`` installers to eggs on the fly.
- EasyInstall will now recognize such files by name and install them.
-
- * Fixed a problem with picking the "best" version to install (versions were
- being sorted as strings, rather than as parsed values)
-
-0.4a4
- * Added support for the distutils "verbose/quiet" and "dry-run" options, as
- well as the "optimize" flag.
-
- * Support downloading packages that were uploaded to PyPI (by scanning all
- links on package pages, not just the homepage/download links).
-
-0.4a3
- * Add progress messages to the search/download process so that you can tell
- what URLs it's reading to find download links. (Hopefully, this will help
- people report out-of-date and broken links to package authors, and to tell
- when they've asked for a package that doesn't exist.)
-
-0.4a2
- * Added support for installing scripts
-
- * Added support for setting options via distutils configuration files, and
- using distutils' default options as a basis for EasyInstall's defaults.
-
- * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
- script installation directory option.
-
- * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
- Python includes SSL support.
-
-0.4a1
- * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
- and search PyPI for needed packages.
-
-0.3a4
- * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
- URL installs, to avoid running the wrong setup.py.
-
-0.3a3
- * Added ``--build-directory=DIR/-b DIR`` option.
-
- * Added "installation report" that explains how to use 'require()' when doing
- a multiversion install or alternate installation directory.
-
- * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
-
- * Added "sandboxing" that stops a setup script from running if it attempts to
- write to the filesystem outside of the build area
-
- * Added more workarounds for packages with quirky ``install_data`` hacks
-
-0.3a2
- * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
- automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-
-Future Plans
-============
-
-* Additional utilities to list/remove/verify packages
-* Signature checking? SSL? Ability to suppress PyPI search?
-* Display byte progress meter when downloading distributions and long pages?
-* Redirect stdout/stderr to log during run_setup?
-
diff --git a/docs/formats.txt b/docs/formats.txt
deleted file mode 100644
index 9e6fe727..00000000
--- a/docs/formats.txt
+++ /dev/null
@@ -1,682 +0,0 @@
-=====================================
-The Internal Structure of Python Eggs
-=====================================
-
-STOP! This is not the first document you should read!
-
-
-
-.. contents:: **Table of Contents**
-
-
-----------------------
-Eggs and their Formats
-----------------------
-
-A "Python egg" is a logical structure embodying the release of a
-specific version of a Python project, comprising its code, resources,
-and metadata. There are multiple formats that can be used to physically
-encode a Python egg, and others can be developed. However, a key
-principle of Python eggs is that they should be discoverable and
-importable. That is, it should be possible for a Python application to
-easily and efficiently find out what eggs are present on a system, and
-to ensure that the desired eggs' contents are importable.
-
-There are two basic formats currently implemented for Python eggs:
-
-1. ``.egg`` format: a directory or zipfile *containing* the project's
- code and resources, along with an ``EGG-INFO`` subdirectory that
- contains the project's metadata
-
-2. ``.egg-info`` format: a file or directory placed *adjacent* to the
- project's code and resources, that directly contains the project's
- metadata.
-
-Both formats can include arbitrary Python code and resources, including
-static data files, package and non-package directories, Python
-modules, C extension modules, and so on. But each format is optimized
-for different purposes.
-
-The ``.egg`` format is well-suited to distribution and the easy
-uninstallation or upgrades of code, since the project is essentially
-self-contained within a single directory or file, unmingled with any
-other projects' code or resources. It also makes it possible to have
-multiple versions of a project simultaneously installed, such that
-individual programs can select the versions they wish to use.
-
-The ``.egg-info`` format, on the other hand, was created to support
-backward-compatibility, performance, and ease of installation for system
-packaging tools that expect to install all projects' code and resources
-to a single directory (e.g. ``site-packages``). Placing the metadata
-in that same directory simplifies the installation process, since it
-isn't necessary to create ``.pth`` files or otherwise modify
-``sys.path`` to include each installed egg.
-
-Its disadvantage, however, is that it provides no support for clean
-uninstallation or upgrades, and of course only a single version of a
-project can be installed to a given directory. Thus, support from a
-package management tool is required. (This is why setuptools' "install"
-command refers to this type of egg installation as "single-version,
-externally managed".) Also, they lack sufficient data to allow them to
-be copied from their installation source. easy_install can "ship" an
-application by copying ``.egg`` files or directories to a target
-location, but it cannot do this for ``.egg-info`` installs, because
-there is no way to tell what code and resources belong to a particular
-egg -- there may be several eggs "scrambled" together in a single
-installation location, and the ``.egg-info`` format does not currently
-include a way to list the files that were installed. (This may change
-in a future version.)
-
-
-Code and Resources
-==================
-
-The layout of the code and resources is dictated by Python's normal
-import layout, relative to the egg's "base location".
-
-For the ``.egg`` format, the base location is the ``.egg`` itself. That
-is, adding the ``.egg`` filename or directory name to ``sys.path``
-makes its contents importable.
-
-For the ``.egg-info`` format, however, the base location is the
-directory that *contains* the ``.egg-info``, and thus it is the
-directory that must be added to ``sys.path`` to make the egg importable.
-(Note that this means that the "normal" installation of a package to a
-``sys.path`` directory is sufficient to make it an "egg" if it has an
-``.egg-info`` file or directory installed alongside of it.)
-
-
-Project Metadata
-=================
-
-If eggs contained only code and resources, there would of course be
-no difference between them and any other directory or zip file on
-``sys.path``. Thus, metadata must also be included, using a metadata
-file or directory.
-
-For the ``.egg`` format, the metadata is placed in an ``EGG-INFO``
-subdirectory, directly within the ``.egg`` file or directory. For the
-``.egg-info`` format, metadata is stored directly within the
-``.egg-info`` directory itself.
-
-The minimum project metadata that all eggs must have is a standard
-Python ``PKG-INFO`` file, named ``PKG-INFO`` and placed within the
-metadata directory appropriate to the format. Because it's possible for
-this to be the only metadata file included, ``.egg-info`` format eggs
-are not required to be a directory; they can just be a ``.egg-info``
-file that directly contains the ``PKG-INFO`` metadata. This eliminates
-the need to create a directory just to store one file. This option is
-*not* available for ``.egg`` formats, since setuptools always includes
-other metadata. (In fact, setuptools itself never generates
-``.egg-info`` files, either; the support for using files was added so
-that the requirement could easily be satisfied by other tools, such
-as the distutils in Python 2.5).
-
-In addition to the ``PKG-INFO`` file, an egg's metadata directory may
-also include files and directories representing various forms of
-optional standard metadata (see the section on `Standard Metadata`_,
-below) or user-defined metadata required by the project. For example,
-some projects may define a metadata format to describe their application
-plugins, and metadata in this format would then be included by plugin
-creators in their projects' metadata directories.
-
-
-Filename-Embedded Metadata
-==========================
-
-To allow introspection of installed projects and runtime resolution of
-inter-project dependencies, a certain amount of information is embedded
-in egg filenames. At a minimum, this includes the project name, and
-ideally will also include the project version number. Optionally, it
-can also include the target Python version and required runtime
-platform if platform-specific C code is included. The syntax of an
-egg filename is as follows::
-
- name ["-" version ["-py" pyver ["-" required_platform]]] "." ext
-
-The "name" and "version" should be escaped using the ``to_filename()``
-function provided by ``pkg_resources``, after first processing them with
-``safe_name()`` and ``safe_version()`` respectively. These latter two
-functions can also be used to later "unescape" these parts of the
-filename. (For a detailed description of these transformations, please
-see the "Parsing Utilities" section of the ``pkg_resources`` manual.)
-
-The "pyver" string is the Python major version, as found in the first
-3 characters of ``sys.version``. "required_platform" is essentially
-a distutils ``get_platform()`` string, but with enhancements to properly
-distinguish Mac OS versions. (See the ``get_build_platform()``
-documentation in the "Platform Utilities" section of the
-``pkg_resources`` manual for more details.)
-
-Finally, the "ext" is either ``.egg`` or ``.egg-info``, as appropriate
-for the egg's format.
-
-Normally, an egg's filename should include at least the project name and
-version, as this allows the runtime system to find desired project
-versions without having to read the egg's PKG-INFO to determine its
-version number.
-
-Setuptools, however, only includes the version number in the filename
-when an ``.egg`` file is built using the ``bdist_egg`` command, or when
-an ``.egg-info`` directory is being installed by the
-``install_egg_info`` command. When generating metadata for use with the
-original source tree, it only includes the project name, so that the
-directory will not have to be renamed each time the project's version
-changes.
-
-This is especially important when version numbers change frequently, and
-the source metadata directory is kept under version control with the
-rest of the project. (As would be the case when the project's source
-includes project-defined metadata that is not generated by
-setuptools from data in the setup script.)
-
-
-Egg Links
-=========
-
-In addition to the ``.egg`` and ``.egg-info`` formats, there is a third
-egg-related extension that you may encounter on occasion: ``.egg-link``
-files.
-
-These files are not eggs, strictly speaking. They simply provide a way
-to reference an egg that is not physically installed in the desired
-location. They exist primarily as a cross-platform alternative to
-symbolic links, to support "installing" code that is being developed in
-a different location than the desired installation location. For
-example, if a user is developing an application plugin in their home
-directory, but the plugin needs to be "installed" in an application
-plugin directory, running "setup.py develop -md /path/to/app/plugins"
-will install an ``.egg-link`` file in ``/path/to/app/plugins``, that
-tells the egg runtime system where to find the actual egg (the user's
-project source directory and its ``.egg-info`` subdirectory).
-
-``.egg-link`` files are named following the format for ``.egg`` and
-``.egg-info`` names, but only the project name is included; no version,
-Python version, or platform information is included. When the runtime
-searches for available eggs, ``.egg-link`` files are opened and the
-actual egg file/directory name is read from them.
-
-Each ``.egg-link`` file should contain a single file or directory name,
-with no newlines. This filename should be the base location of one or
-more eggs. That is, the name must either end in ``.egg``, or else it
-should be the parent directory of one or more ``.egg-info`` format eggs.
-
-As of setuptools 0.6c6, the path may be specified as a platform-independent
-(i.e. ``/``-separated) relative path from the directory containing the
-``.egg-link`` file, and a second line may appear in the file, specifying a
-platform-independent relative path from the egg's base directory to its
-setup script directory. This allows installation tools such as EasyInstall
-to find the project's setup directory and build eggs or perform other setup
-commands on it.
-
-
------------------
-Standard Metadata
------------------
-
-In addition to the minimum required ``PKG-INFO`` metadata, projects can
-include a variety of standard metadata files or directories, as
-described below. Except as otherwise noted, these files and directories
-are automatically generated by setuptools, based on information supplied
-in the setup script or through analysis of the project's code and
-resources.
-
-Most of these files and directories are generated via "egg-info
-writers" during execution of the setuptools ``egg_info`` command, and
-are listed in the ``egg_info.writers`` entry point group defined by
-setuptools' own ``setup.py`` file.
-
-Project authors can register their own metadata writers as entry points
-in this group (as described in the setuptools manual under "Adding new
-EGG-INFO Files") to cause setuptools to generate project-specific
-metadata files or directories during execution of the ``egg_info``
-command. It is up to project authors to document these new metadata
-formats, if they create any.
-
-
-``.txt`` File Formats
-=====================
-
-Files described in this section that have ``.txt`` extensions have a
-simple lexical format consisting of a sequence of text lines, each line
-terminated by a linefeed character (regardless of platform). Leading
-and trailing whitespace on each line is ignored, as are blank lines and
-lines whose first nonblank character is a ``#`` (comment symbol). (This
-is the parsing format defined by the ``yield_lines()`` function of
-the ``pkg_resources`` module.)
-
-All ``.txt`` files defined by this section follow this format, but some
-are also "sectioned" files, meaning that their contents are divided into
-sections, using square-bracketed section headers akin to Windows
-``.ini`` format. Note that this does *not* imply that the lines within
-the sections follow an ``.ini`` format, however. Please see an
-individual metadata file's documentation for a description of what the
-lines and section names mean in that particular file.
-
-Sectioned files can be parsed using the ``split_sections()`` function;
-see the "Parsing Utilities" section of the ``pkg_resources`` manual
-for details.
-
-
-Dependency Metadata
-===================
-
-
-``requires.txt``
-----------------
-
-This is a "sectioned" text file. Each section is a sequence of
-"requirements", as parsed by the ``parse_requirements()`` function;
-please see the ``pkg_resources`` manual for the complete requirement
-parsing syntax.
-
-The first, unnamed section (i.e., before the first section header) in
-this file is the project's core requirements, which must be installed
-for the project to function. (Specified using the ``install_requires``
-keyword to ``setup()``).
-
-The remaining (named) sections describe the project's "extra"
-requirements, as specified using the ``extras_require`` keyword to
-``setup()``. The section name is the name of the optional feature, and
-the section body lists that feature's dependencies.
-
-Note that it is not normally necessary to inspect this file directly;
-``pkg_resources.Distribution`` objects have a ``requires()`` method
-that can be used to obtain ``Requirement`` objects describing the
-project's core and optional dependencies.
-
-
-``setup_requires.txt``
-----------------------
-
-Much like ``requires.txt`` except represents the requirements
-specified by the ``setup_requires`` parameter to the Distribution.
-
-
-``dependency_links.txt``
-------------------------
-
-A list of dependency URLs, one per line, as specified using the
-``dependency_links`` keyword to ``setup()``. These may be direct
-download URLs, or the URLs of web pages containing direct download
-links, and will be used by EasyInstall to find dependencies, as though
-the user had manually provided them via the ``--find-links`` command
-line option. Please see the setuptools manual and EasyInstall manual
-for more information on specifying this option, and for information on
-how EasyInstall processes ``--find-links`` URLs.
-
-
-``depends.txt`` -- Obsolete, do not create!
--------------------------------------------
-
-This file follows an identical format to ``requires.txt``, but is
-obsolete and should not be used. The earliest versions of setuptools
-required users to manually create and maintain this file, so the runtime
-still supports reading it, if it exists. The new filename was created
-so that it could be automatically generated from ``setup()`` information
-without overwriting an existing hand-created ``depends.txt``, if one
-was already present in the project's source ``.egg-info`` directory.
-
-
-``namespace_packages.txt`` -- Namespace Package Metadata
-========================================================
-
-A list of namespace package names, one per line, as supplied to the
-``namespace_packages`` keyword to ``setup()``. Please see the manuals
-for setuptools and ``pkg_resources`` for more information about
-namespace packages.
-
-
-``entry_points.txt`` -- "Entry Point"/Plugin Metadata
-=====================================================
-
-This is a "sectioned" text file, whose contents encode the
-``entry_points`` keyword supplied to ``setup()``. All sections are
-named, as the section names specify the entry point groups in which the
-corresponding section's entry points are registered.
-
-Each section is a sequence of "entry point" lines, each parseable using
-the ``EntryPoint.parse`` classmethod; please see the ``pkg_resources``
-manual for the complete entry point parsing syntax.
-
-Note that it is not necessary to parse this file directly; the
-``pkg_resources`` module provides a variety of APIs to locate and load
-entry points automatically. Please see the setuptools and
-``pkg_resources`` manuals for details on the nature and uses of entry
-points.
-
-
-The ``scripts`` Subdirectory
-============================
-
-This directory is currently only created for ``.egg`` files built by
-the setuptools ``bdist_egg`` command. It will contain copies of all
-of the project's "traditional" scripts (i.e., those specified using the
-``scripts`` keyword to ``setup()``). This is so that they can be
-reconstituted when an ``.egg`` file is installed.
-
-The scripts are placed here using the distutils' standard
-``install_scripts`` command, so any ``#!`` lines reflect the Python
-installation where the egg was built. But instead of copying the
-scripts to the local script installation directory, EasyInstall writes
-short wrapper scripts that invoke the original scripts from inside the
-egg, after ensuring that sys.path includes the egg and any eggs it
-depends on. For more about `script wrappers`_, see the section below on
-`Installation and Path Management Issues`_.
-
-
-Zip Support Metadata
-====================
-
-
-``native_libs.txt``
--------------------
-
-A list of C extensions and other dynamic link libraries contained in
-the egg, one per line. Paths are ``/``-separated and relative to the
-egg's base location.
-
-This file is generated as part of ``bdist_egg`` processing, and as such
-only appears in ``.egg`` files (and ``.egg`` directories created by
-unpacking them). It is used to ensure that all libraries are extracted
-from a zipped egg at the same time, in case there is any direct linkage
-between them. Please see the `Zip File Issues`_ section below for more
-information on library and resource extraction from ``.egg`` files.
-
-
-``eager_resources.txt``
------------------------
-
-A list of resource files and/or directories, one per line, as specified
-via the ``eager_resources`` keyword to ``setup()``. Paths are
-``/``-separated and relative to the egg's base location.
-
-Resource files or directories listed here will be extracted
-simultaneously, if any of the named resources are extracted, or if any
-native libraries listed in ``native_libs.txt`` are extracted. Please
-see the setuptools manual for details on what this feature is used for
-and how it works, as well as the `Zip File Issues`_ section below.
-
-
-``zip-safe`` and ``not-zip-safe``
----------------------------------
-
-These are zero-length files, and either one or the other should exist.
-If ``zip-safe`` exists, it means that the project will work properly
-when installed as an ``.egg`` zipfile, and conversely the existence of
-``not-zip-safe`` means the project should not be installed as an
-``.egg`` file. The ``zip_safe`` option to setuptools' ``setup()``
-determines which file will be written. If the option isn't provided,
-setuptools attempts to make its own assessment of whether the package
-can work, based on code and content analysis.
-
-If neither file is present at installation time, EasyInstall defaults
-to assuming that the project should be unzipped. (Command-line options
-to EasyInstall, however, take precedence even over an existing
-``zip-safe`` or ``not-zip-safe`` file.)
-
-Note that these flag files appear only in ``.egg`` files generated by
-``bdist_egg``, and in ``.egg`` directories created by unpacking such an
-``.egg`` file.
-
-
-
-``top_level.txt`` -- Conflict Management Metadata
-=================================================
-
-This file is a list of the top-level module or package names provided
-by the project, one Python identifier per line.
-
-Subpackages are not included; a project containing both a ``foo.bar``
-and a ``foo.baz`` would include only one line, ``foo``, in its
-``top_level.txt``.
-
-This data is used by ``pkg_resources`` at runtime to issue a warning if
-an egg is added to ``sys.path`` when its contained packages may have
-already been imported.
-
-(It was also once used to detect conflicts with non-egg packages at
-installation time, but in more recent versions, setuptools installs eggs
-in such a way that they always override non-egg packages, thus
-preventing a problem from arising.)
-
-
-``SOURCES.txt`` -- Source Files Manifest
-========================================
-
-This file is roughly equivalent to the distutils' ``MANIFEST`` file.
-The differences are as follows:
-
-* The filenames always use ``/`` as a path separator, which must be
- converted back to a platform-specific path whenever they are read.
-
-* The file is automatically generated by setuptools whenever the
- ``egg_info`` or ``sdist`` commands are run, and it is *not*
- user-editable.
-
-Although this metadata is included with distributed eggs, it is not
-actually used at runtime for any purpose. Its function is to ensure
-that setuptools-built *source* distributions can correctly discover
-what files are part of the project's source, even if the list had been
-generated using revision control metadata on the original author's
-system.
-
-In other words, ``SOURCES.txt`` has little or no runtime value for being
-included in distributed eggs, and it is possible that future versions of
-the ``bdist_egg`` and ``install_egg_info`` commands will strip it before
-installation or distribution. Therefore, do not rely on its being
-available outside of an original source directory or source
-distribution.
-
-
-------------------------------
-Other Technical Considerations
-------------------------------
-
-
-Zip File Issues
-===============
-
-Although zip files resemble directories, they are not fully
-substitutable for them. Most platforms do not support loading dynamic
-link libraries contained in zipfiles, so it is not possible to directly
-import C extensions from ``.egg`` zipfiles. Similarly, there are many
-existing libraries -- whether in Python or C -- that require actual
-operating system filenames, and do not work with arbitrary "file-like"
-objects or in-memory strings, and thus cannot operate directly on the
-contents of zip files.
-
-To address these issues, the ``pkg_resources`` module provides a
-"resource API" to support obtaining either the contents of a resource,
-or a true operating system filename for the resource. If the egg
-containing the resource is a directory, the resource's real filename
-is simply returned. However, if the egg is a zipfile, then the
-resource is first extracted to a cache directory, and the filename
-within the cache is returned.
-
-The cache directory is determined by the ``pkg_resources`` API; please
-see the ``set_cache_path()`` and ``get_default_cache()`` documentation
-for details.
-
-
-The Extraction Process
-----------------------
-
-Resources are extracted to a cache subdirectory whose name is based
-on the enclosing ``.egg`` filename and the path to the resource. If
-there is already a file of the correct name, size, and timestamp, its
-filename is returned to the requester. Otherwise, the desired file is
-extracted first to a temporary name generated using
-``mkstemp(".$extract",target_dir)``, and then its timestamp is set to
-match the one in the zip file, before renaming it to its final name.
-(Some collision detection and resolution code is used to handle the
-fact that Windows doesn't overwrite files when renaming.)
-
-If a resource directory is requested, all of its contents are
-recursively extracted in this fashion, to ensure that the directory
-name can be used as if it were valid all along.
-
-If the resource requested for extraction is listed in the
-``native_libs.txt`` or ``eager_resources.txt`` metadata files, then
-*all* resources listed in *either* file will be extracted before the
-requested resource's filename is returned, thus ensuring that all
-C extensions and data used by them will be simultaneously available.
-
-
-Extension Import Wrappers
--------------------------
-
-Since Python's built-in zip import feature does not support loading
-C extension modules from zipfiles, the setuptools ``bdist_egg`` command
-generates special import wrappers to make it work.
-
-The wrappers are ``.py`` files (along with corresponding ``.pyc``
-and/or ``.pyo`` files) that have the same module name as the
-corresponding C extension. These wrappers are located in the same
-package directory (or top-level directory) within the zipfile, so that
-say, ``foomodule.so`` will get a corresponding ``foo.py``, while
-``bar/baz.pyd`` will get a corresponding ``bar/baz.py``.
-
-These wrapper files contain a short stanza of Python code that asks
-``pkg_resources`` for the filename of the corresponding C extension,
-then reloads the module using the obtained filename. This will cause
-``pkg_resources`` to first ensure that all of the egg's C extensions
-(and any accompanying "eager resources") are extracted to the cache
-before attempting to link to the C library.
-
-Note, by the way, that ``.egg`` directories will also contain these
-wrapper files. However, Python's default import priority is such that
-C extensions take precedence over same-named Python modules, so the
-import wrappers are ignored unless the egg is a zipfile.
-
-
-Installation and Path Management Issues
-=======================================
-
-Python's initial setup of ``sys.path`` is very dependent on the Python
-version and installation platform, as well as how Python was started
-(i.e., script vs. ``-c`` vs. ``-m`` vs. interactive interpreter).
-In fact, Python also provides only two relatively robust ways to affect
-``sys.path`` outside of direct manipulation in code: the ``PYTHONPATH``
-environment variable, and ``.pth`` files.
-
-However, with no cross-platform way to safely and persistently change
-environment variables, this leaves ``.pth`` files as EasyInstall's only
-real option for persistent configuration of ``sys.path``.
-
-But ``.pth`` files are rather strictly limited in what they are allowed
-to do normally. They add directories only to the *end* of ``sys.path``,
-after any locally-installed ``site-packages`` directory, and they are
-only processed *in* the ``site-packages`` directory to start with.
-
-This is a double whammy for users who lack write access to that
-directory, because they can't create a ``.pth`` file that Python will
-read, and even if a sympathetic system administrator adds one for them
-that calls ``site.addsitedir()`` to allow some other directory to
-contain ``.pth`` files, they won't be able to install newer versions of
-anything that's installed in the systemwide ``site-packages``, because
-their paths will still be added *after* ``site-packages``.
-
-So EasyInstall applies two workarounds to solve these problems.
-
-The first is that EasyInstall leverages ``.pth`` files' "import" feature
-to manipulate ``sys.path`` and ensure that anything EasyInstall adds
-to a ``.pth`` file will always appear before both the standard library
-and the local ``site-packages`` directories. Thus, it is always
-possible for a user who can write a Python-read ``.pth`` file to ensure
-that their packages come first in their own environment.
-
-Second, when installing to a ``PYTHONPATH`` directory (as opposed to
-a "site" directory like ``site-packages``) EasyInstall will also install
-a special version of the ``site`` module. Because it's in a
-``PYTHONPATH`` directory, this module will get control before the
-standard library version of ``site`` does. It will record the state of
-``sys.path`` before invoking the "real" ``site`` module, and then
-afterwards it processes any ``.pth`` files found in ``PYTHONPATH``
-directories, including all the fixups needed to ensure that eggs always
-appear before the standard library in sys.path, but are in a relative
-order to one another that is defined by their ``PYTHONPATH`` and
-``.pth``-prescribed sequence.
-
-The net result of these changes is that ``sys.path`` order will be
-as follows at runtime:
-
-1. The ``sys.argv[0]`` directory, or an empty string if no script
- is being executed.
-
-2. All eggs installed by EasyInstall in any ``.pth`` file in each
- ``PYTHONPATH`` directory, in order first by ``PYTHONPATH`` order,
- then normal ``.pth`` processing order (which is to say alphabetical
- by ``.pth`` filename, then by the order of listing within each
- ``.pth`` file).
-
-3. All eggs installed by EasyInstall in any ``.pth`` file in each "site"
- directory (such as ``site-packages``), following the same ordering
- rules as for the ones on ``PYTHONPATH``.
-
-4. The ``PYTHONPATH`` directories themselves, in their original order
-
-5. Any paths from ``.pth`` files found on ``PYTHONPATH`` that were *not*
- eggs installed by EasyInstall, again following the same relative
- ordering rules.
-
-6. The standard library and "site" directories, along with the contents
- of any ``.pth`` files found in the "site" directories.
-
-Notice that sections 1, 4, and 6 comprise the "normal" Python setup for
-``sys.path``. Sections 2 and 3 are inserted to support eggs, and
-section 5 emulates what the "normal" semantics of ``.pth`` files on
-``PYTHONPATH`` would be if Python natively supported them.
-
-For further discussion of the tradeoffs that went into this design, as
-well as notes on the actual magic inserted into ``.pth`` files to make
-them do these things, please see also the following messages to the
-distutils-SIG mailing list:
-
-* http://mail.python.org/pipermail/distutils-sig/2006-February/006026.html
-* http://mail.python.org/pipermail/distutils-sig/2006-March/006123.html
-
-
-Script Wrappers
----------------
-
-EasyInstall never directly installs a project's original scripts to
-a script installation directory. Instead, it writes short wrapper
-scripts that first ensure that the project's dependencies are active
-on sys.path, before invoking the original script. These wrappers
-have a #! line that points to the version of Python that was used to
-install them, and their second line is always a comment that indicates
-the type of script wrapper, the project version required for the script
-to run, and information identifying the script to be invoked.
-
-The format of this marker line is::
-
- "# EASY-INSTALL-" script_type ": " tuple_of_strings "\n"
-
-The ``script_type`` is one of ``SCRIPT``, ``DEV-SCRIPT``, or
-``ENTRY-SCRIPT``. The ``tuple_of_strings`` is a comma-separated
-sequence of Python string constants. For ``SCRIPT`` and ``DEV-SCRIPT``
-wrappers, there are two strings: the project version requirement, and
-the script name (as a filename within the ``scripts`` metadata
-directory). For ``ENTRY-SCRIPT`` wrappers, there are three:
-the project version requirement, the entry point group name, and the
-entry point name. (See the "Automatic Script Creation" section in the
-setuptools manual for more information about entry point scripts.)
-
-In each case, the project version requirement string will be a string
-parseable with the ``pkg_resources`` modules' ``Requirement.parse()``
-classmethod. The only difference between a ``SCRIPT`` wrapper and a
-``DEV-SCRIPT`` is that a ``DEV-SCRIPT`` actually executes the original
-source script in the project's source tree, and is created when the
-"setup.py develop" command is run. A ``SCRIPT`` wrapper, on the other
-hand, uses the "installed" script written to the ``EGG-INFO/scripts``
-subdirectory of the corresponding ``.egg`` zipfile or directory.
-(``.egg-info`` eggs do not have script wrappers associated with them,
-except in the "setup.py develop" case.)
-
-The purpose of including the marker line in generated script wrappers is
-to facilitate introspection of installed scripts, and their relationship
-to installed eggs. For example, an uninstallation tool could use this
-data to identify what scripts can safely be removed, and/or identify
-what scripts would stop working if a particular egg is uninstalled.
-
diff --git a/docs/history.txt b/docs/history.txt
deleted file mode 100644
index 8e217503..00000000
--- a/docs/history.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-:tocdepth: 2
-
-.. _changes:
-
-History
-*******
-
-.. include:: ../CHANGES (links).rst
diff --git a/docs/index.txt b/docs/index.txt
deleted file mode 100644
index 6ac37252..00000000
--- a/docs/index.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-Welcome to Setuptools' documentation!
-=====================================
-
-Setuptools is a fully-featured, actively-maintained, and stable library
-designed to facilitate packaging Python projects, where packaging includes:
-
- - Python package and module definitions
- - Distribution package metadata
- - Test hooks
- - Project installation
- - Platform-specific details
- - Python 3 support
-
-Documentation content:
-
-.. toctree::
- :maxdepth: 2
-
- history
- roadmap
- python3
- setuptools
- easy_install
- pkg_resources
- development
diff --git a/docs/pkg_resources.txt b/docs/pkg_resources.txt
deleted file mode 100644
index 7b979ec3..00000000
--- a/docs/pkg_resources.txt
+++ /dev/null
@@ -1,1952 +0,0 @@
-=============================================================
-Package Discovery and Resource Access using ``pkg_resources``
-=============================================================
-
-The ``pkg_resources`` module distributed with ``setuptools`` provides an API
-for Python libraries to access their resource files, and for extensible
-applications and frameworks to automatically discover plugins. It also
-provides runtime support for using C extensions that are inside zipfile-format
-eggs, support for merging packages that have separately-distributed modules or
-subpackages, and APIs for managing Python's current "working set" of active
-packages.
-
-
-.. contents:: **Table of Contents**
-
-
---------
-Overview
---------
-
-The ``pkg_resources`` module provides runtime facilities for finding,
-introspecting, activating and using installed Python distributions. Some
-of the more advanced features (notably the support for parallel installation
-of multiple versions) rely specifically on the "egg" format (either as a
-zip archive or subdirectory), while others (such as plugin discovery) will
-work correctly so long as "egg-info" metadata directories are available for
-relevant distributions.
-
-Eggs are a distribution format for Python modules, similar in concept to
-Java's "jars" or Ruby's "gems", or the "wheel" format defined in PEP 427.
-However, unlike a pure distribution format, eggs can also be installed and
-added directly to ``sys.path`` as an import location. When installed in
-this way, eggs are *discoverable*, meaning that they carry metadata that
-unambiguously identifies their contents and dependencies. This means that
-an installed egg can be *automatically* found and added to ``sys.path`` in
-response to simple requests of the form, "get me everything I need to use
-docutils' PDF support". This feature allows mutually conflicting versions of
-a distribution to co-exist in the same Python installation, with individual
-applications activating the desired version at runtime by manipulating the
-contents of ``sys.path`` (this differs from the virtual environment
-approach, which involves creating isolated environments for each
-application).
-
-The following terms are needed in order to explain the capabilities offered
-by this module:
-
-project
- A library, framework, script, plugin, application, or collection of data
- or other resources, or some combination thereof. Projects are assumed to
- have "relatively unique" names, e.g. names registered with PyPI.
-
-release
- A snapshot of a project at a particular point in time, denoted by a version
- identifier.
-
-distribution
- A file or files that represent a particular release.
-
-importable distribution
- A file or directory that, if placed on ``sys.path``, allows Python to
- import any modules contained within it.
-
-pluggable distribution
- An importable distribution whose filename unambiguously identifies its
- release (i.e. project and version), and whose contents unambiguously
- specify what releases of other projects will satisfy its runtime
- requirements.
-
-extra
- An "extra" is an optional feature of a release, that may impose additional
- runtime requirements. For example, if docutils PDF support required a
- PDF support library to be present, docutils could define its PDF support as
- an "extra", and list what other project releases need to be available in
- order to provide it.
-
-environment
- A collection of distributions potentially available for importing, but not
- necessarily active. More than one distribution (i.e. release version) for
- a given project may be present in an environment.
-
-working set
- A collection of distributions actually available for importing, as on
- ``sys.path``. At most one distribution (release version) of a given
- project may be present in a working set, as otherwise there would be
- ambiguity as to what to import.
-
-eggs
- Eggs are pluggable distributions in one of the three formats currently
- supported by ``pkg_resources``. There are built eggs, development eggs,
- and egg links. Built eggs are directories or zipfiles whose name ends
- with ``.egg`` and follows the egg naming conventions, and contain an
- ``EGG-INFO`` subdirectory (zipped or otherwise). Development eggs are
- normal directories of Python code with one or more ``ProjectName.egg-info``
- subdirectories. The development egg format is also used to provide a
- default version of a distribution that is available to software that
- doesn't use ``pkg_resources`` to request specific versions. Egg links
- are ``*.egg-link`` files that contain the name of a built or
- development egg, to support symbolic linking on platforms that do not
- have native symbolic links (or where the symbolic link support is
- limited).
-
-(For more information about these terms and concepts, see also this
-`architectural overview`_ of ``pkg_resources`` and Python Eggs in general.)
-
-.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html
-
-
-.. -----------------
-.. Developer's Guide
-.. -----------------
-
-.. This section isn't written yet. Currently planned topics include
- Accessing Resources
- Finding and Activating Package Distributions
- get_provider()
- require()
- WorkingSet
- iter_distributions
- Running Scripts
- Configuration
- Namespace Packages
- Extensible Applications and Frameworks
- Locating entry points
- Activation listeners
- Metadata access
- Extended Discovery and Installation
- Supporting Custom PEP 302 Implementations
-.. For now, please check out the extensive `API Reference`_ below.
-
-
--------------
-API Reference
--------------
-
-Namespace Package Support
-=========================
-
-A namespace package is a package that only contains other packages and modules,
-with no direct contents of its own. Such packages can be split across
-multiple, separately-packaged distributions. They are normally used to split
-up large packages produced by a single organization, such as in the ``zope``
-namespace package for Zope Corporation packages, and the ``peak`` namespace
-package for the Python Enterprise Application Kit.
-
-To create a namespace package, you list it in the ``namespace_packages``
-argument to ``setup()``, in your project's ``setup.py``. (See the `setuptools
-documentation on namespace packages`_ for more information on this.) Also,
-you must add a ``declare_namespace()`` call in the package's ``__init__.py``
-file(s):
-
-``declare_namespace(name)``
- Declare that the dotted package name `name` is a "namespace package" whose
- contained packages and modules may be spread across multiple distributions.
- The named package's ``__path__`` will be extended to include the
- corresponding package in all distributions on ``sys.path`` that contain a
- package of that name. (More precisely, if an importer's
- ``find_module(name)`` returns a loader, then it will also be searched for
- the package's contents.) Whenever a Distribution's ``activate()`` method
- is invoked, it checks for the presence of namespace packages and updates
- their ``__path__`` contents accordingly.
-
-Applications that manipulate namespace packages or directly alter ``sys.path``
-at runtime may also need to use this API function:
-
-``fixup_namespace_packages(path_item)``
- Declare that `path_item` is a newly added item on ``sys.path`` that may
- need to be used to update existing namespace packages. Ordinarily, this is
- called for you when an egg is automatically added to ``sys.path``, but if
- your application modifies ``sys.path`` to include locations that may
- contain portions of a namespace package, you will need to call this
- function to ensure they are added to the existing namespace packages.
-
-Although by default ``pkg_resources`` only supports namespace packages for
-filesystem and zip importers, you can extend its support to other "importers"
-compatible with PEP 302 using the ``register_namespace_handler()`` function.
-See the section below on `Supporting Custom Importers`_ for details.
-
-.. _setuptools documentation on namespace packages: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
-
-
-``WorkingSet`` Objects
-======================
-
-The ``WorkingSet`` class provides access to a collection of "active"
-distributions. In general, there is only one meaningful ``WorkingSet``
-instance: the one that represents the distributions that are currently active
-on ``sys.path``. This global instance is available under the name
-``working_set`` in the ``pkg_resources`` module. However, specialized
-tools may wish to manipulate working sets that don't correspond to
-``sys.path``, and therefore may wish to create other ``WorkingSet`` instances.
-
-It's important to note that the global ``working_set`` object is initialized
-from ``sys.path`` when ``pkg_resources`` is first imported, but is only updated
-if you do all future ``sys.path`` manipulation via ``pkg_resources`` APIs. If
-you manually modify ``sys.path``, you must invoke the appropriate methods on
-the ``working_set`` instance to keep it in sync. Unfortunately, Python does
-not provide any way to detect arbitrary changes to a list object like
-``sys.path``, so ``pkg_resources`` cannot automatically update the
-``working_set`` based on changes to ``sys.path``.
-
-``WorkingSet(entries=None)``
- Create a ``WorkingSet`` from an iterable of path entries. If `entries`
- is not supplied, it defaults to the value of ``sys.path`` at the time
- the constructor is called.
-
- Note that you will not normally construct ``WorkingSet`` instances
- yourself, but instead you will implicitly or explicitly use the global
- ``working_set`` instance. For the most part, the ``pkg_resources`` API
- is designed so that the ``working_set`` is used by default, such that you
- don't have to explicitly refer to it most of the time.
-
-All distributions available directly on ``sys.path`` will be activated
-automatically when ``pkg_resources`` is imported. This behaviour can cause
-version conflicts for applications which require non-default versions of
-those distributions. To handle this situation, ``pkg_resources`` checks for a
-``__requires__`` attribute in the ``__main__`` module when initializing the
-default working set, and uses this to ensure a suitable version of each
-affected distribution is activated. For example::
-
- __requires__ = ["CherryPy < 3"] # Must be set before pkg_resources import
- import pkg_resources
-
-
-Basic ``WorkingSet`` Methods
-----------------------------
-
-The following methods of ``WorkingSet`` objects are also available as module-
-level functions in ``pkg_resources`` that apply to the default ``working_set``
-instance. Thus, you can use e.g. ``pkg_resources.require()`` as an
-abbreviation for ``pkg_resources.working_set.require()``:
-
-
-``require(*requirements)``
- Ensure that distributions matching `requirements` are activated
-
- `requirements` must be a string or a (possibly-nested) sequence
- thereof, specifying the distributions and versions required. The
- return value is a sequence of the distributions that needed to be
- activated to fulfill the requirements; all relevant distributions are
- included, even if they were already activated in this working set.
-
- For the syntax of requirement specifiers, see the section below on
- `Requirements Parsing`_.
-
- In general, it should not be necessary for you to call this method
- directly. It's intended more for use in quick-and-dirty scripting and
- interactive interpreter hacking than for production use. If you're creating
- an actual library or application, it's strongly recommended that you create
- a "setup.py" script using ``setuptools``, and declare all your requirements
- there. That way, tools like EasyInstall can automatically detect what
- requirements your package has, and deal with them accordingly.
-
- Note that calling ``require('SomePackage')`` will not install
- ``SomePackage`` if it isn't already present. If you need to do this, you
- should use the ``resolve()`` method instead, which allows you to pass an
- ``installer`` callback that will be invoked when a needed distribution
- can't be found on the local machine. You can then have this callback
- display a dialog, automatically download the needed distribution, or
- whatever else is appropriate for your application. See the documentation
- below on the ``resolve()`` method for more information, and also on the
- ``obtain()`` method of ``Environment`` objects.
-
-``run_script(requires, script_name)``
- Locate distribution specified by `requires` and run its `script_name`
- script. `requires` must be a string containing a requirement specifier.
- (See `Requirements Parsing`_ below for the syntax.)
-
- The script, if found, will be executed in *the caller's globals*. That's
- because this method is intended to be called from wrapper scripts that
- act as a proxy for the "real" scripts in a distribution. A wrapper script
- usually doesn't need to do anything but invoke this function with the
- correct arguments.
-
- If you need more control over the script execution environment, you
- probably want to use the ``run_script()`` method of a ``Distribution``
- object's `Metadata API`_ instead.
-
-``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`
-
- If `name` is None, yields all entry points in `group` from all
- distributions in the working set, otherwise only ones matching both
- `group` and `name` are yielded. Entry points are yielded from the active
- distributions in the order that the distributions appear in the working
- set. (For the global ``working_set``, this should be the same as the order
- that they are listed in ``sys.path``.) Note that within the entry points
- advertised by an individual distribution, there is no particular ordering.
-
- Please see the section below on `Entry Points`_ for more information.
-
-
-``WorkingSet`` Methods and Attributes
--------------------------------------
-
-These methods are used to query or manipulate the contents of a specific
-working set, so they must be explicitly invoked on a particular ``WorkingSet``
-instance:
-
-``add_entry(entry)``
- Add a path item to the ``entries``, finding any distributions on it. You
- should use this when you add additional items to ``sys.path`` and you want
- the global ``working_set`` to reflect the change. This method is also
- called by the ``WorkingSet()`` constructor during initialization.
-
- This method uses ``find_distributions(entry,True)`` to find distributions
- corresponding to the path entry, and then ``add()`` them. `entry` is
- always appended to the ``entries`` attribute, even if it is already
- present, however. (This is because ``sys.path`` can contain the same value
- more than once, and the ``entries`` attribute should be able to reflect
- this.)
-
-``__contains__(dist)``
- True if `dist` is active in this ``WorkingSet``. Note that only one
- distribution for a given project can be active in a given ``WorkingSet``.
-
-``__iter__()``
- Yield distributions for non-duplicate projects in the working set.
- The yield order is the order in which the items' path entries were
- added to the working set.
-
-``find(req)``
- Find a distribution matching `req` (a ``Requirement`` instance).
- If there is an active distribution for the requested project, this
- returns it, as long as it meets the version requirement specified by
- `req`. But, if there is an active distribution for the project and it
- does *not* meet the `req` requirement, ``VersionConflict`` is raised.
- If there is no active distribution for the requested project, ``None``
- is returned.
-
-``resolve(requirements, env=None, installer=None)``
- List all distributions needed to (recursively) meet `requirements`
-
- `requirements` must be a sequence of ``Requirement`` objects. `env`,
- if supplied, should be an ``Environment`` instance. If
- not supplied, an ``Environment`` is created from the working set's
- ``entries``. `installer`, if supplied, will be invoked with each
- requirement that cannot be met by an already-installed distribution; it
- should return a ``Distribution`` or ``None``. (See the ``obtain()`` method
- of `Environment Objects`_, below, for more information on the `installer`
- argument.)
-
-``add(dist, entry=None)``
- Add `dist` to working set, associated with `entry`
-
- If `entry` is unspecified, it defaults to ``dist.location``. On exit from
- this routine, `entry` is added to the end of the working set's ``.entries``
- (if it wasn't already present).
-
- `dist` is only added to the working set if it's for a project that
- doesn't already have a distribution active in the set. If it's
- successfully added, any callbacks registered with the ``subscribe()``
- method will be called. (See `Receiving Change Notifications`_, below.)
-
- Note: ``add()`` is automatically called for you by the ``require()``
- method, so you don't normally need to use this method directly.
-
-``entries``
- This attribute represents a "shadow" ``sys.path``, primarily useful for
- debugging. If you are experiencing import problems, you should check
- the global ``working_set`` object's ``entries`` against ``sys.path``, to
- ensure that they match. If they do not, then some part of your program
- is manipulating ``sys.path`` without updating the ``working_set``
- accordingly. IMPORTANT NOTE: do not directly manipulate this attribute!
- Setting it equal to ``sys.path`` will not fix your problem, any more than
- putting black tape over an "engine warning" light will fix your car! If
- this attribute is out of sync with ``sys.path``, it's merely an *indicator*
- of the problem, not the cause of it.
-
-
-Receiving Change Notifications
-------------------------------
-
-Extensible applications and frameworks may need to receive notification when
-a new distribution (such as a plug-in component) has been added to a working
-set. This is what the ``subscribe()`` method and ``add_activation_listener()``
-function are for.
-
-``subscribe(callback)``
- Invoke ``callback(distribution)`` once for each active distribution that is
- in the set now, or gets added later. Because the callback is invoked for
- already-active distributions, you do not need to loop over the working set
- yourself to deal with the existing items; just register the callback and
- be prepared for the fact that it will be called immediately by this method.
-
- Note that callbacks *must not* allow exceptions to propagate, or they will
- interfere with the operation of other callbacks and possibly result in an
- inconsistent working set state. Callbacks should use a try/except block
- to ignore, log, or otherwise process any errors, especially since the code
- that caused the callback to be invoked is unlikely to be able to handle
- the errors any better than the callback itself.
-
-``pkg_resources.add_activation_listener()`` is an alternate spelling of
-``pkg_resources.working_set.subscribe()``.
-
-
-Locating Plugins
-----------------
-
-Extensible applications will sometimes have a "plugin directory" or a set of
-plugin directories, from which they want to load entry points or other
-metadata. The ``find_plugins()`` method allows you to do this, by scanning an
-environment for the newest version of each project that can be safely loaded
-without conflicts or missing requirements.
-
-``find_plugins(plugin_env, full_env=None, fallback=True)``
- Scan `plugin_env` and identify which distributions could be added to this
- working set without version conflicts or missing requirements.
-
- Example usage::
-
- distributions, errors = working_set.find_plugins(
- Environment(plugin_dirlist)
- )
- map(working_set.add, distributions) # add plugins+libs to sys.path
- print "Couldn't load", errors # display errors
-
- The `plugin_env` should be an ``Environment`` instance that contains only
- distributions that are in the project's "plugin directory" or directories.
- The `full_env`, if supplied, should be an ``Environment`` instance that
- contains all currently-available distributions.
-
- If `full_env` is not supplied, one is created automatically from the
- ``WorkingSet`` this method is called on, which will typically mean that
- every directory on ``sys.path`` will be scanned for distributions.
-
- This method returns a 2-tuple: (`distributions`, `error_info`), where
- `distributions` is a list of the distributions found in `plugin_env` that
- were loadable, along with any other distributions that are needed to resolve
- their dependencies. `error_info` is a dictionary mapping unloadable plugin
- distributions to an exception instance describing the error that occurred.
- Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
- instance.
-
- Most applications will use this method mainly on the master ``working_set``
- instance in ``pkg_resources``, and then immediately add the returned
- distributions to the working set so that they are available on sys.path.
- This will make it possible to find any entry points, and allow any other
- metadata tracking and hooks to be activated.
-
- The resolution algorithm used by ``find_plugins()`` is as follows. First,
- the project names of the distributions present in `plugin_env` are sorted.
- Then, each project's eggs are tried in descending version order (i.e.,
- newest version first).
-
- An attempt is made to resolve each egg's dependencies. If the attempt is
- successful, the egg and its dependencies are added to the output list and to
- a temporary copy of the working set. The resolution process continues with
- the next project name, and no older eggs for that project are tried.
-
- If the resolution attempt fails, however, the error is added to the error
- dictionary. If the `fallback` flag is true, the next older version of the
- plugin is tried, until a working version is found. If false, the resolution
- process continues with the next plugin project name.
-
- Some applications may have stricter fallback requirements than others. For
- example, an application that has a database schema or persistent objects
- may not be able to safely downgrade a version of a package. Others may want
- to ensure that a new plugin configuration is either 100% good or else
- revert to a known-good configuration. (That is, they may wish to revert to
- a known configuration if the `error_info` return value is non-empty.)
-
- Note that this algorithm gives precedence to satisfying the dependencies of
- alphabetically prior project names in case of version conflicts. If two
- projects named "AaronsPlugin" and "ZekesPlugin" both need different versions
- of "TomsLibrary", then "AaronsPlugin" will win and "ZekesPlugin" will be
- disabled due to version conflict.
-
-
-``Environment`` Objects
-=======================
-
-An "environment" is a collection of ``Distribution`` objects, usually ones
-that are present and potentially importable on the current platform.
-``Environment`` objects are used by ``pkg_resources`` to index available
-distributions during dependency resolution.
-
-``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
- Create an environment snapshot by scanning `search_path` for distributions
- compatible with `platform` and `python`. `search_path` should be a
- sequence of strings such as might be used on ``sys.path``. If a
- `search_path` isn't supplied, ``sys.path`` is used.
-
- `platform` is an optional string specifying the name of the platform
- that platform-specific distributions must be compatible with. If
- unspecified, it defaults to the current platform. `python` is an
- optional string naming the desired version of Python (e.g. ``'2.4'``);
- it defaults to the currently-running version.
-
- You may explicitly set `platform` (and/or `python`) to ``None`` if you
- wish to include *all* distributions, not just those compatible with the
- running platform or Python version.
-
- Note that `search_path` is scanned immediately for distributions, and the
- resulting ``Environment`` is a snapshot of the found distributions. It
- is not automatically updated if the system's state changes due to e.g.
- installation or removal of distributions.
-
-``__getitem__(project_name)``
- Returns a list of distributions for the given project name, ordered
- from newest to oldest version. (And highest to lowest format precedence
- for distributions that contain the same version of the project.) If there
- are no distributions for the project, returns an empty list.
-
-``__iter__()``
- Yield the unique project names of the distributions in this environment.
- The yielded names are always in lower case.
-
-``add(dist)``
- Add `dist` to the environment if it matches the platform and python version
- specified at creation time, and only if the distribution hasn't already
- been added. (i.e., adding the same distribution more than once is a no-op.)
-
-``remove(dist)``
- Remove `dist` from the environment.
-
-``can_add(dist)``
- Is distribution `dist` acceptable for this environment? If it's not
- compatible with the ``platform`` and ``python`` version values specified
- when the environment was created, a false value is returned.
-
-``__add__(dist_or_env)`` (``+`` operator)
- Add a distribution or environment to an ``Environment`` instance, returning
- a *new* environment object that contains all the distributions previously
- contained by both. The new environment will have a ``platform`` and
- ``python`` of ``None``, meaning that it will not reject any distributions
- from being added to it; it will simply accept whatever is added. If you
- want the added items to be filtered for platform and Python version, or
- you want to add them to the *same* environment instance, you should use
- in-place addition (``+=``) instead.
-
-``__iadd__(dist_or_env)`` (``+=`` operator)
- Add a distribution or environment to an ``Environment`` instance
- *in-place*, updating the existing instance and returning it. The
- ``platform`` and ``python`` filter attributes take effect, so distributions
- in the source that do not have a suitable platform string or Python version
- are silently ignored.
-
-``best_match(req, working_set, installer=None)``
- Find distribution best matching `req` and usable on `working_set`
-
- This calls the ``find(req)`` method of the `working_set` to see if a
- suitable distribution is already active. (This may raise
- ``VersionConflict`` if an unsuitable version of the project is already
- active in the specified `working_set`.) If a suitable distribution isn't
- active, this method returns the newest distribution in the environment
- that meets the ``Requirement`` in `req`. If no suitable distribution is
- found, and `installer` is supplied, then the result of calling
- the environment's ``obtain(req, installer)`` method will be returned.
-
-``obtain(requirement, installer=None)``
- Obtain a distro that matches requirement (e.g. via download). In the
- base ``Environment`` class, this routine just returns
- ``installer(requirement)``, unless `installer` is None, in which case
- None is returned instead. This method is a hook that allows subclasses
- to attempt other ways of obtaining a distribution before falling back
- to the `installer` argument.
-
-``scan(search_path=None)``
- Scan `search_path` for distributions usable on `platform`
-
- Any distributions found are added to the environment. `search_path` should
- be a sequence of strings such as might be used on ``sys.path``. If not
- supplied, ``sys.path`` is used. Only distributions conforming to
- the platform/python version defined at initialization are added. This
- method is a shortcut for using the ``find_distributions()`` function to
- find the distributions from each item in `search_path`, and then calling
- ``add()`` to add each one to the environment.
-
-
-``Requirement`` Objects
-=======================
-
-``Requirement`` objects express what versions of a project are suitable for
-some purpose. These objects (or their string form) are used by various
-``pkg_resources`` APIs in order to find distributions that a script or
-distribution needs.
-
-
-Requirements Parsing
---------------------
-
-``parse_requirements(s)``
- Yield ``Requirement`` objects for a string or iterable of lines. Each
- requirement must start on a new line. See below for syntax.
-
-``Requirement.parse(s)``
- Create a ``Requirement`` object from a string or iterable of lines. A
- ``ValueError`` is raised if the string or lines do not contain a valid
- requirement specifier, or if they contain more than one specifier. (To
- parse multiple specifiers from a string or iterable of strings, use
- ``parse_requirements()`` instead.)
-
- The syntax of a requirement specifier is defined in full in PEP 508.
-
- Some examples of valid requirement specifiers::
-
- FooProject >= 1.2
- Fizzy [foo, bar]
- PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
- SomethingWhoseVersionIDontCareAbout
- SomethingWithMarker[foo]>1.0;python_version<"2.7"
-
- The project name is the only required portion of a requirement string, and
- if it's the only thing supplied, the requirement will accept any version
- of that project.
-
- The "extras" in a requirement are used to request optional features of a
- project, that may require additional project distributions in order to
- function. For example, if the hypothetical "Report-O-Rama" project offered
- optional PDF support, it might require an additional library in order to
- provide that support. Thus, a project needing Report-O-Rama's PDF features
- could use a requirement of ``Report-O-Rama[PDF]`` to request installation
- or activation of both Report-O-Rama and any libraries it needs in order to
- provide PDF support. For example, you could use::
-
- easy_install.py Report-O-Rama[PDF]
-
- To install the necessary packages using the EasyInstall program, or call
- ``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary
- distributions to sys.path at runtime.
-
- The "markers" in a requirement are used to specify when a requirement
- should be installed -- the requirement will be installed if the marker
- evaluates as true in the current environment. For example, specifying
- ``argparse;python_version<"2.7"`` will not install in a Python 2.7 or 3.3
- environment, but will in a Python 2.6 environment.
-
-``Requirement`` Methods and Attributes
---------------------------------------
-
-``__contains__(dist_or_version)``
- Return true if `dist_or_version` fits the criteria for this requirement.
- If `dist_or_version` is a ``Distribution`` object, its project name must
- match the requirement's project name, and its version must meet the
- requirement's version criteria. If `dist_or_version` is a string, it is
- parsed using the ``parse_version()`` utility function. Otherwise, it is
- assumed to be an already-parsed version.
-
- The ``Requirement`` object's version specifiers (``.specs``) are internally
- sorted into ascending version order, and used to establish what ranges of
- versions are acceptable. Adjacent redundant conditions are effectively
- consolidated (e.g. ``">1, >2"`` produces the same results as ``">2"``, and
- ``"<2,<3"`` produces the same results as ``"<2"``). ``"!="`` versions are
- excised from the ranges they fall within. The version being tested for
- acceptability is then checked for membership in the resulting ranges.
-
-``__eq__(other_requirement)``
- A requirement compares equal to another requirement if they have
- case-insensitively equal project names, version specifiers, and "extras".
- (The order that extras and version specifiers are in is also ignored.)
- Equal requirements also have equal hashes, so that requirements can be
- used in sets or as dictionary keys.
-
-``__str__()``
- The string form of a ``Requirement`` is a string that, if passed to
- ``Requirement.parse()``, would return an equal ``Requirement`` object.
-
-``project_name``
- The name of the required project
-
-``key``
- An all-lowercase version of the ``project_name``, useful for comparison
- or indexing.
-
-``extras``
- A tuple of names of "extras" that this requirement calls for. (These will
- be all-lowercase and normalized using the ``safe_extra()`` parsing utility
- function, so they may not exactly equal the extras the requirement was
- created with.)
-
-``specs``
- A list of ``(op,version)`` tuples, sorted in ascending parsed-version
- order. The `op` in each tuple is a comparison operator, represented as
- a string. The `version` is the (unparsed) version number.
-
-``marker``
- An instance of ``packaging.markers.Marker`` that allows evaluation
- against the current environment. May be None if no marker specified.
-
-``url``
- The location to download the requirement from if specified.
-
-Entry Points
-============
-
-Entry points are a simple way for distributions to "advertise" Python objects
-(such as functions or classes) for use by other distributions. Extensible
-applications and frameworks can search for entry points with a particular name
-or group, either from a specific distribution or from all active distributions
-on sys.path, and then inspect or load the advertised objects at will.
-
-Entry points belong to "groups" which are named with a dotted name similar to
-a Python package or module name. For example, the ``setuptools`` package uses
-an entry point named ``distutils.commands`` in order to find commands defined
-by distutils extensions. ``setuptools`` treats the names of entry points
-defined in that group as the acceptable commands for a setup script.
-
-In a similar way, other packages can define their own entry point groups,
-either using dynamic names within the group (like ``distutils.commands``), or
-possibly using predefined names within the group. For example, a blogging
-framework that offers various pre- or post-publishing hooks might define an
-entry point group and look for entry points named "pre_process" and
-"post_process" within that group.
-
-To advertise an entry point, a project needs to use ``setuptools`` and provide
-an ``entry_points`` argument to ``setup()`` in its setup script, so that the
-entry points will be included in the distribution's metadata. For more
-details, see the ``setuptools`` documentation. (XXX link here to setuptools)
-
-Each project distribution can advertise at most one entry point of a given
-name within the same entry point group. For example, a distutils extension
-could advertise two different ``distutils.commands`` entry points, as long as
-they had different names. However, there is nothing that prevents *different*
-projects from advertising entry points of the same name in the same group. In
-some cases, this is a desirable thing, since the application or framework that
-uses the entry points may be calling them as hooks, or in some other way
-combining them. It is up to the application or framework to decide what to do
-if multiple distributions advertise an entry point; some possibilities include
-using both entry points, displaying an error message, using the first one found
-in sys.path order, etc.
-
-
-Convenience API
----------------
-
-In the following functions, the `dist` argument can be a ``Distribution``
-instance, a ``Requirement`` instance, or a string specifying a requirement
-(i.e. project name, version, etc.). If the argument is a string or
-``Requirement``, the specified distribution is located (and added to sys.path
-if not already present). An error will be raised if a matching distribution is
-not available.
-
-The `group` argument should be a string containing a dotted identifier,
-identifying an entry point group. If you are defining an entry point group,
-you should include some portion of your package's name in the group name so as
-to avoid collision with other packages' entry point groups.
-
-``load_entry_point(dist, group, name)``
- Load the named entry point from the specified distribution, or raise
- ``ImportError``.
-
-``get_entry_info(dist, group, name)``
- Return an ``EntryPoint`` object for the given `group` and `name` from
- the specified distribution. Returns ``None`` if the distribution has not
- advertised a matching entry point.
-
-``get_entry_map(dist, group=None)``
- Return the distribution's entry point map for `group`, or the full entry
- map for the distribution. This function always returns a dictionary,
- even if the distribution advertises no entry points. If `group` is given,
- the dictionary maps entry point names to the corresponding ``EntryPoint``
- object. If `group` is None, the dictionary maps group names to
- dictionaries that then map entry point names to the corresponding
- ``EntryPoint`` instance in that group.
-
-``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`.
-
- If `name` is None, yields all entry points in `group` from all
- distributions in the working set on sys.path, otherwise only ones matching
- both `group` and `name` are yielded. Entry points are yielded from
- the active distributions in the order that the distributions appear on
- sys.path. (Within entry points for a particular distribution, however,
- there is no particular ordering.)
-
- (This API is actually a method of the global ``working_set`` object; see
- the section above on `Basic WorkingSet Methods`_ for more information.)
-
-
-Creating and Parsing
---------------------
-
-``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
- Create an ``EntryPoint`` instance. `name` is the entry point name. The
- `module_name` is the (dotted) name of the module containing the advertised
- object. `attrs` is an optional tuple of names to look up from the
- module to obtain the advertised object. For example, an `attrs` of
- ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
- advertised object could be obtained by the following code::
-
- import baz
- advertised_object = baz.foo.bar
-
- The `extras` are an optional tuple of "extra feature" names that the
- distribution needs in order to provide this entry point. When the
- entry point is loaded, these extra features are looked up in the `dist`
- argument to find out what other distributions may need to be activated
- on sys.path; see the ``load()`` method for more details. The `extras`
- argument is only meaningful if `dist` is specified. `dist` must be
- a ``Distribution`` instance.
-
-``EntryPoint.parse(src, dist=None)`` (classmethod)
- Parse a single entry point from string `src`
-
- Entry point syntax follows the form::
-
- name = some.module:some.attr [extra1,extra2]
-
- The entry name and module name are required, but the ``:attrs`` and
- ``[extras]`` parts are optional, as is the whitespace shown between
- some of the items. The `dist` argument is passed through to the
- ``EntryPoint()`` constructor, along with the other values parsed from
- `src`.
-
-``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
- Parse `lines` (a string or sequence of lines) to create a dictionary
- mapping entry point names to ``EntryPoint`` objects. ``ValueError`` is
- raised if entry point names are duplicated, if `group` is not a valid
- entry point group name, or if there are any syntax errors. (Note: the
- `group` parameter is used only for validation and to create more
- informative error messages.) If `dist` is provided, it will be used to
- set the ``dist`` attribute of the created ``EntryPoint`` objects.
-
-``EntryPoint.parse_map(data, dist=None)`` (classmethod)
- Parse `data` into a dictionary mapping group names to dictionaries mapping
- entry point names to ``EntryPoint`` objects. If `data` is a dictionary,
- then the keys are used as group names and the values are passed to
- ``parse_group()`` as the `lines` argument. If `data` is a string or
- sequence of lines, it is first split into .ini-style sections (using
- the ``split_sections()`` utility function) and the section names are used
- as group names. In either case, the `dist` argument is passed through to
- ``parse_group()`` so that the entry points will be linked to the specified
- distribution.
-
-
-``EntryPoint`` Objects
-----------------------
-
-For simple introspection, ``EntryPoint`` objects have attributes that
-correspond exactly to the constructor argument names: ``name``,
-``module_name``, ``attrs``, ``extras``, and ``dist`` are all available. In
-addition, the following methods are provided:
-
-``load(require=True, env=None, installer=None)``
- Load the entry point, returning the advertised Python object, or raise
- ``ImportError`` if it cannot be obtained. If `require` is a true value,
- then ``require(env, installer)`` is called before attempting the import.
-
-``require(env=None, installer=None)``
- Ensure that any "extras" needed by the entry point are available on
- sys.path. ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
- but no ``dist``, or if the named extras are not defined by the
- distribution. If `env` is supplied, it must be an ``Environment``, and it
- will be used to search for needed distributions if they are not already
- present on sys.path. If `installer` is supplied, it must be a callable
- taking a ``Requirement`` instance and returning a matching importable
- ``Distribution`` instance or None.
-
-``__str__()``
- The string form of an ``EntryPoint`` is a string that could be passed to
- ``EntryPoint.parse()`` to produce an equivalent ``EntryPoint``.
-
-
-``Distribution`` Objects
-========================
-
-``Distribution`` objects represent collections of Python code that may or may
-not be importable, and may or may not have metadata and resources associated
-with them. Their metadata may include information such as what other projects
-the distribution depends on, what entry points the distribution advertises, and
-so on.
-
-
-Getting or Creating Distributions
----------------------------------
-
-Most commonly, you'll obtain ``Distribution`` objects from a ``WorkingSet`` or
-an ``Environment``. (See the sections above on `WorkingSet Objects`_ and
-`Environment Objects`_, which are containers for active distributions and
-available distributions, respectively.) You can also obtain ``Distribution``
-objects from one of these high-level APIs:
-
-``find_distributions(path_item, only=False)``
- Yield distributions accessible via `path_item`. If `only` is true, yield
- only distributions whose ``location`` is equal to `path_item`. In other
- words, if `only` is true, this yields any distributions that would be
- importable if `path_item` were on ``sys.path``. If `only` is false, this
- also yields distributions that are "in" or "under" `path_item`, but would
- not be importable unless their locations were also added to ``sys.path``.
-
-``get_distribution(dist_spec)``
- Return a ``Distribution`` object for a given ``Requirement`` or string.
- If `dist_spec` is already a ``Distribution`` instance, it is returned.
- If it is a ``Requirement`` object or a string that can be parsed into one,
- it is used to locate and activate a matching distribution, which is then
- returned.
-
-However, if you're creating specialized tools for working with distributions,
-or creating a new distribution format, you may also need to create
-``Distribution`` objects directly, using one of the three constructors below.
-
-These constructors all take an optional `metadata` argument, which is used to
-access any resources or metadata associated with the distribution. `metadata`
-must be an object that implements the ``IResourceProvider`` interface, or None.
-If it is None, an ``EmptyProvider`` is used instead. ``Distribution`` objects
-implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
-delegating them to the `metadata` object.
-
-``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
- Create a distribution for `location`, which must be a string such as a
- URL, filename, or other string that might be used on ``sys.path``.
- `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
- If `basename` ends with ``.egg``, then the project's name, version, python
- version and platform are extracted from the filename and used to set those
- properties of the created distribution. Any additional keyword arguments
- are forwarded to the ``Distribution()`` constructor.
-
-``Distribution.from_filename(filename, metadata=None, **kw)`` (classmethod)
- Create a distribution by parsing a local filename. This is a shorter way
- of saying ``Distribution.from_location(normalize_path(filename),
- os.path.basename(filename), metadata)``. In other words, it creates a
- distribution whose location is the normalized form of the filename, parsing
- name and version information from the base portion of the filename. Any
- additional keyword arguments are forwarded to the ``Distribution()``
- constructor.
-
-``Distribution(location,metadata,project_name,version,py_version,platform,precedence)``
- Create a distribution by setting its properties. All arguments are
- optional and default to None, except for `py_version` (which defaults to
- the current Python version) and `precedence` (which defaults to
- ``EGG_DIST``; for more details see ``precedence`` under `Distribution
- Attributes`_ below). Note that it's usually easier to use the
- ``from_filename()`` or ``from_location()`` constructors than to specify
- all these arguments individually.
-
-
-``Distribution`` Attributes
----------------------------
-
-location
- A string indicating the distribution's location. For an importable
- distribution, this is the string that would be added to ``sys.path`` to
- make it actively importable. For non-importable distributions, this is
- simply a filename, URL, or other way of locating the distribution.
-
-project_name
- A string, naming the project that this distribution is for. Project names
- are defined by a project's setup script, and they are used to identify
- projects on PyPI. When a ``Distribution`` is constructed, the
- `project_name` argument is passed through the ``safe_name()`` utility
- function to filter out any unacceptable characters.
-
-key
- ``dist.key`` is short for ``dist.project_name.lower()``. It's used for
- case-insensitive comparison and indexing of distributions by project name.
-
-extras
- A list of strings, giving the names of extra features defined by the
- project's dependency list (the ``extras_require`` argument specified in
- the project's setup script).
-
-version
- A string denoting what release of the project this distribution contains.
- When a ``Distribution`` is constructed, the `version` argument is passed
- through the ``safe_version()`` utility function to filter out any
- unacceptable characters. If no `version` is specified at construction
- time, then attempting to access this attribute later will cause the
- ``Distribution`` to try to discover its version by reading its ``PKG-INFO``
- metadata file. If ``PKG-INFO`` is unavailable or can't be parsed,
- ``ValueError`` is raised.
-
-parsed_version
- The ``parsed_version`` is an object representing a "parsed" form of the
- distribution's ``version``. ``dist.parsed_version`` is a shortcut for
- calling ``parse_version(dist.version)``. It is used to compare or sort
- distributions by version. (See the `Parsing Utilities`_ section below for
- more information on the ``parse_version()`` function.) Note that accessing
- ``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
- was constructed without a `version` and without `metadata` capable of
- supplying the missing version info.
-
-py_version
- The major/minor Python version the distribution supports, as a string.
- For example, "2.7" or "3.4". The default is the current version of Python.
-
-platform
- A string representing the platform the distribution is intended for, or
- ``None`` if the distribution is "pure Python" and therefore cross-platform.
- See `Platform Utilities`_ below for more information on platform strings.
-
-precedence
- A distribution's ``precedence`` is used to determine the relative order of
- two distributions that have the same ``project_name`` and
- ``parsed_version``. The default precedence is ``pkg_resources.EGG_DIST``,
- which is the highest (i.e. most preferred) precedence. The full list
- of predefined precedences, from most preferred to least preferred, is:
- ``EGG_DIST``, ``BINARY_DIST``, ``SOURCE_DIST``, ``CHECKOUT_DIST``, and
- ``DEVELOP_DIST``. Normally, precedences other than ``EGG_DIST`` are used
- only by the ``setuptools.package_index`` module, when sorting distributions
- found in a package index to determine their suitability for installation.
- "System" and "Development" eggs (i.e., ones that use the ``.egg-info``
- format), however, are automatically given a precedence of ``DEVELOP_DIST``.
-
-
-
-``Distribution`` Methods
-------------------------
-
-``activate(path=None)``
- Ensure distribution is importable on `path`. If `path` is None,
- ``sys.path`` is used instead. This ensures that the distribution's
- ``location`` is in the `path` list, and it also performs any necessary
- namespace package fixups or declarations. (That is, if the distribution
- contains namespace packages, this method ensures that they are declared,
- and that the distribution's contents for those namespace packages are
- merged with the contents provided by any other active distributions. See
- the section above on `Namespace Package Support`_ for more information.)
-
- ``pkg_resources`` adds a notification callback to the global ``working_set``
- that ensures this method is called whenever a distribution is added to it.
- Therefore, you should not normally need to explicitly call this method.
- (Note that this means that namespace packages on ``sys.path`` are always
- imported as soon as ``pkg_resources`` is, which is another reason why
- namespace packages should not contain any code or import statements.)
-
-``as_requirement()``
- Return a ``Requirement`` instance that matches this distribution's project
- name and version.
-
-``requires(extras=())``
- List the ``Requirement`` objects that specify this distribution's
- dependencies. If `extras` is specified, it should be a sequence of names
- of "extras" defined by the distribution, and the list returned will then
- include any dependencies needed to support the named "extras".
-
-``clone(**kw)``
- Create a copy of the distribution. Any supplied keyword arguments override
- the corresponding argument to the ``Distribution()`` constructor, allowing
- you to change some of the copied distribution's attributes.
-
-``egg_name()``
- Return what this distribution's standard filename should be, not including
- the ".egg" extension. For example, a distribution for project "Foo"
- version 1.2 that runs on Python 2.3 for Windows would have an ``egg_name()``
- of ``Foo-1.2-py2.3-win32``. Any dashes in the name or version are
- converted to underscores. (``Distribution.from_location()`` will convert
- them back when parsing a ".egg" file name.)
-
-``__cmp__(other)``, ``__hash__()``
- Distribution objects are hashed and compared on the basis of their parsed
- version and precedence, followed by their key (lowercase project name),
- location, Python version, and platform.
-
-The following methods are used to access ``EntryPoint`` objects advertised
-by the distribution. See the section above on `Entry Points`_ for more
-detailed information about these operations:
-
-``get_entry_info(group, name)``
- Return the ``EntryPoint`` object for `group` and `name`, or None if no
- such point is advertised by this distribution.
-
-``get_entry_map(group=None)``
- Return the entry point map for `group`. If `group` is None, return
- a dictionary mapping group names to entry point maps for all groups.
- (An entry point map is a dictionary of entry point names to ``EntryPoint``
- objects.)
-
-``load_entry_point(group, name)``
- Short for ``get_entry_info(group, name).load()``. Returns the object
- advertised by the named entry point, or raises ``ImportError`` if
- the entry point isn't advertised by this distribution, or there is some
- other import problem.
-
-In addition to the above methods, ``Distribution`` objects also implement all
-of the `IResourceProvider`_ and `IMetadataProvider Methods`_ (which are
-documented in later sections):
-
-* ``has_metadata(name)``
-* ``metadata_isdir(name)``
-* ``metadata_listdir(name)``
-* ``get_metadata(name)``
-* ``get_metadata_lines(name)``
-* ``run_script(script_name, namespace)``
-* ``get_resource_filename(manager, resource_name)``
-* ``get_resource_stream(manager, resource_name)``
-* ``get_resource_string(manager, resource_name)``
-* ``has_resource(resource_name)``
-* ``resource_isdir(resource_name)``
-* ``resource_listdir(resource_name)``
-
-If the distribution was created with a `metadata` argument, these resource and
-metadata access methods are all delegated to that `metadata` provider.
-Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
-will appear to have no resources or metadata. This delegation approach is used
-so that supporting custom importers or new distribution formats can be done
-simply by creating an appropriate `IResourceProvider`_ implementation; see the
-section below on `Supporting Custom Importers`_ for more details.
-
-
-``ResourceManager`` API
-=======================
-
-The ``ResourceManager`` class provides uniform access to package resources,
-whether those resources exist as files and directories or are compressed in
-an archive of some kind.
-
-Normally, you do not need to create or explicitly manage ``ResourceManager``
-instances, as the ``pkg_resources`` module creates a global instance for you,
-and makes most of its methods available as top-level names in the
-``pkg_resources`` module namespace. So, for example, this code actually
-calls the ``resource_string()`` method of the global ``ResourceManager``::
-
- import pkg_resources
- my_data = pkg_resources.resource_string(__name__, "foo.dat")
-
-Thus, you can use the APIs below without needing an explicit
-``ResourceManager`` instance; just import and use them as needed.
-
-
-Basic Resource Access
----------------------
-
-In the following methods, the `package_or_requirement` argument may be either
-a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
-If it is a package or module name, the named module or package must be
-importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package. (Note
-that if a module name is used, then the resource name is relative to the
-package immediately containing the named module. Also, you should not use
-a namespace package name, because a namespace package can be spread across
-multiple distributions, and is therefore ambiguous as to which distribution
-should be searched for the resource.)
-
-If it is a ``Requirement``, then the requirement is automatically resolved
-(searching the current ``Environment`` if necessary) and a matching
-distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
-already present. (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.) The `resource_name` argument is then interpreted
-relative to the root of the identified distribution; i.e. its first path
-segment will be treated as a peer of the top-level modules or packages in the
-distribution.
-
-Note that resource names must be ``/``-separated paths and cannot be absolute
-(i.e. no leading ``/``) or contain relative names like ``".."``. Do *not* use
-``os.path`` routines to manipulate resource paths, as they are *not* filesystem
-paths.
-
-``resource_exists(package_or_requirement, resource_name)``
- Does the named resource exist? Return ``True`` or ``False`` accordingly.
-
-``resource_stream(package_or_requirement, resource_name)``
- Return a readable file-like object for the specified resource; it may be
- an actual file, a ``StringIO``, or some similar object. The stream is
- in "binary mode", in the sense that whatever bytes are in the resource
- will be read as-is.
-
-``resource_string(package_or_requirement, resource_name)``
- Return the specified resource as a string. The resource is read in
- binary fashion, such that the returned string contains exactly the bytes
- that are stored in the resource.
-
-``resource_isdir(package_or_requirement, resource_name)``
- Is the named resource a directory? Return ``True`` or ``False``
- accordingly.
-
-``resource_listdir(package_or_requirement, resource_name)``
- List the contents of the named resource directory, just like ``os.listdir``
- except that it works even if the resource is in a zipfile.
-
-Note that only ``resource_exists()`` and ``resource_isdir()`` are insensitive
-as to the resource type. You cannot use ``resource_listdir()`` on a file
-resource, and you can't use ``resource_string()`` or ``resource_stream()`` on
-directory resources. Using an inappropriate method for the resource type may
-result in an exception or undefined behavior, depending on the platform and
-distribution format involved.
-
-
-Resource Extraction
--------------------
-
-``resource_filename(package_or_requirement, resource_name)``
- Sometimes, it is not sufficient to access a resource in string or stream
- form, and a true filesystem filename is needed. In such cases, you can
- use this method (or module-level function) to obtain a filename for a
- resource. If the resource is in an archive distribution (such as a zipped
- egg), it will be extracted to a cache directory, and the filename within
- the cache will be returned. If the named resource is a directory, then
- all resources within that directory (including subdirectories) are also
- extracted. If the named resource is a C extension or "eager resource"
- (see the ``setuptools`` documentation for details), then all C extensions
- and eager resources are extracted at the same time.
-
- Archived resources are extracted to a cache location that can be managed by
- the following two methods:
-
-``set_extraction_path(path)``
- Set the base path where resources will be extracted to, if needed.
-
- If you do not call this routine before any extractions take place, the
- path defaults to the return value of ``get_default_cache()``. (Which is
- based on the ``PYTHON_EGG_CACHE`` environment variable, with various
- platform-specific fallbacks. See that routine's documentation for more
- details.)
-
- Resources are extracted to subdirectories of this path based upon
- information given by the resource provider. You may set this to a
- temporary directory, but then you must call ``cleanup_resources()`` to
- delete the extracted files when done. There is no guarantee that
- ``cleanup_resources()`` will be able to remove all extracted files. (On
- Windows, for example, you can't unlink .pyd or .dll files that are still
- in use.)
-
- Note that you may not change the extraction path for a given resource
- manager once resources have been extracted, unless you first call
- ``cleanup_resources()``.
-
-``cleanup_resources(force=False)``
- Delete all extracted resource files and directories, returning a list
- of the file and directory names that could not be successfully removed.
- This function does not have any concurrency protection, so it should
- generally only be called when the extraction path is a temporary
- directory exclusive to a single process. This method is not
- automatically called; you must call it explicitly or register it as an
- ``atexit`` function if you wish to ensure cleanup of a temporary
- directory used for extractions.
-
-
-"Provider" Interface
---------------------
-
-If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider``
-for a new distribution archive format, you may need to use the following
-``IResourceManager`` methods to co-ordinate extraction of resources to the
-filesystem. If you're not implementing an archive format, however, you have
-no need to use these methods. Unlike the other methods listed above, they are
-*not* available as top-level functions tied to the global ``ResourceManager``;
-you must therefore have an explicit ``ResourceManager`` instance to use them.
-
-``get_cache_path(archive_name, names=())``
- Return absolute location in cache for `archive_name` and `names`
-
- The parent directory of the resulting path will be created if it does
- not already exist. `archive_name` should be the base filename of the
- enclosing egg (which may not be the name of the enclosing zipfile!),
- including its ".egg" extension. `names`, if provided, should be a
- sequence of path name parts "under" the egg's extraction location.
-
- This method should only be called by resource providers that need to
- obtain an extraction location, and only for names they intend to
- extract, as it tracks the generated names for possible cleanup later.
-
-``extraction_error()``
- Raise an ``ExtractionError`` describing the active exception as interfering
- with the extraction process. You should call this if you encounter any
- OS errors extracting the file to the cache path; it will format the
- operating system exception for you, and add other information to the
- ``ExtractionError`` instance that may be needed by programs that want to
- wrap or handle extraction errors themselves.
-
-``postprocess(tempname, filename)``
- Perform any platform-specific postprocessing of `tempname`.
- Resource providers should call this method ONLY after successfully
- extracting a compressed resource. They must NOT call it on resources
- that are already in the filesystem.
-
- `tempname` is the current (temporary) name of the file, and `filename`
- is the name it will be renamed to by the caller after this routine
- returns.
-
-
-Metadata API
-============
-
-The metadata API is used to access metadata resources bundled in a pluggable
-distribution. Metadata resources are virtual files or directories containing
-information about the distribution, such as might be used by an extensible
-application or framework to connect "plugins". Like other kinds of resources,
-metadata resource names are ``/``-separated and should not contain ``..`` or
-begin with a ``/``. You should not use ``os.path`` routines to manipulate
-resource paths.
-
-The metadata API is provided by objects implementing the ``IMetadataProvider``
-or ``IResourceProvider`` interfaces. ``Distribution`` objects implement this
-interface, as do objects returned by the ``get_provider()`` function:
-
-``get_provider(package_or_requirement)``
- If a package name is supplied, return an ``IResourceProvider`` for the
- package. If a ``Requirement`` is supplied, resolve it by returning a
- ``Distribution`` from the current working set (searching the current
- ``Environment`` if necessary and adding the newly found ``Distribution``
- to the working set). If the named package can't be imported, or the
- ``Requirement`` can't be satisfied, an exception is raised.
-
- NOTE: if you use a package name rather than a ``Requirement``, the object
- you get back may not be a pluggable distribution, depending on the method
- by which the package was installed. In particular, "development" packages
- and "single-version externally-managed" packages do not have any way to
- map from a package name to the corresponding project's metadata. Do not
- write code that passes a package name to ``get_provider()`` and then tries
- to retrieve project metadata from the returned object. It may appear to
- work when the named package is in an ``.egg`` file or directory, but
- it will fail in other installation scenarios. If you want project
- metadata, you need to ask for a *project*, not a package.
-
-
-``IMetadataProvider`` Methods
------------------------------
-
-The methods provided by objects (such as ``Distribution`` instances) that
-implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
-
-``has_metadata(name)``
- Does the named metadata resource exist?
-
-``metadata_isdir(name)``
- Is the named metadata resource a directory?
-
-``metadata_listdir(name)``
- List of metadata names in the directory (like ``os.listdir()``)
-
-``get_metadata(name)``
- Return the named metadata resource as a string. The data is read in binary
- mode; i.e., the exact bytes of the resource file are returned.
-
-``get_metadata_lines(name)``
- Yield named metadata resource as list of non-blank non-comment lines. This
- is short for calling ``yield_lines(provider.get_metadata(name))``. See the
- section on `yield_lines()`_ below for more information on the syntax it
- recognizes.
-
-``run_script(script_name, namespace)``
- Execute the named script in the supplied namespace dictionary. Raises
- ``ResolutionError`` if there is no script by that name in the ``scripts``
- metadata directory. `namespace` should be a Python dictionary, usually
- a module dictionary if the script is being run as a module.
-
-
-Exceptions
-==========
-
-``pkg_resources`` provides a simple exception hierarchy for problems that may
-occur when processing requests to locate and activate packages::
-
- ResolutionError
- DistributionNotFound
- VersionConflict
- UnknownExtra
-
- ExtractionError
-
-``ResolutionError``
- This class is used as a base class for the other three exceptions, so that
- you can catch all of them with a single "except" clause. It is also raised
- directly for miscellaneous requirement-resolution problems like trying to
- run a script that doesn't exist in the distribution it was requested from.
-
-``DistributionNotFound``
- A distribution needed to fulfill a requirement could not be found.
-
-``VersionConflict``
- The requested version of a project conflicts with an already-activated
- version of the same project.
-
-``UnknownExtra``
- One of the "extras" requested was not recognized by the distribution it
- was requested from.
-
-``ExtractionError``
- A problem occurred extracting a resource to the Python Egg cache. The
- following attributes are available on instances of this exception:
-
- manager
- The resource manager that raised this exception
-
- cache_path
- The base directory for resource extraction
-
- original_error
- The exception instance that caused extraction to fail
-
-
-Supporting Custom Importers
-===========================
-
-By default, ``pkg_resources`` supports normal filesystem imports, and
-``zipimport`` importers. If you wish to use the ``pkg_resources`` features
-with other (PEP 302-compatible) importers or module loaders, you may need to
-register various handlers and support functions using these APIs:
-
-``register_finder(importer_type, distribution_finder)``
- Register `distribution_finder` to find distributions in ``sys.path`` items.
- `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
- item handler), and `distribution_finder` is a callable that, when passed a
- path item, the importer instance, and an `only` flag, yields
- ``Distribution`` instances found under that path item. (The `only` flag,
- if true, means the finder should yield only ``Distribution`` objects whose
- ``location`` is equal to the path item provided.)
-
- See the source of the ``pkg_resources.find_on_path`` function for an
- example finder function.
-
-``register_loader_type(loader_type, provider_factory)``
- Register `provider_factory` to make ``IResourceProvider`` objects for
- `loader_type`. `loader_type` is the type or class of a PEP 302
- ``module.__loader__``, and `provider_factory` is a function that, when
- passed a module object, returns an `IResourceProvider`_ for that module,
- allowing it to be used with the `ResourceManager API`_.
-
-``register_namespace_handler(importer_type, namespace_handler)``
- Register `namespace_handler` to declare namespace packages for the given
- `importer_type`. `importer_type` is the type or class of a PEP 302
- "importer" (sys.path item handler), and `namespace_handler` is a callable
- with a signature like this::
-
- def namespace_handler(importer, path_entry, moduleName, module):
- # return a path_entry to use for child packages
-
- Namespace handlers are only called if the relevant importer object has
- already agreed that it can handle the relevant path item. The handler
- should only return a subpath if the module ``__path__`` does not already
- contain an equivalent subpath. Otherwise, it should return None.
-
- For an example namespace handler, see the source of the
- ``pkg_resources.file_ns_handler`` function, which is used for both zipfile
- importing and regular importing.
-
-
-IResourceProvider
------------------
-
-``IResourceProvider`` is an abstract class that documents what methods are
-required of objects returned by a `provider_factory` registered with
-``register_loader_type()``. ``IResourceProvider`` is a subclass of
-``IMetadataProvider``, so objects that implement this interface must also
-implement all of the `IMetadataProvider Methods`_ as well as the methods
-shown here. The `manager` argument to the methods below must be an object
-that supports the full `ResourceManager API`_ documented above.
-
-``get_resource_filename(manager, resource_name)``
- Return a true filesystem path for `resource_name`, coordinating the
- extraction with `manager`, if the resource must be unpacked to the
- filesystem.
-
-``get_resource_stream(manager, resource_name)``
- Return a readable file-like object for `resource_name`.
-
-``get_resource_string(manager, resource_name)``
- Return a string containing the contents of `resource_name`.
-
-``has_resource(resource_name)``
- Does the package contain the named resource?
-
-``resource_isdir(resource_name)``
- Is the named resource a directory? Return a false value if the resource
- does not exist or is not a directory.
-
-``resource_listdir(resource_name)``
- Return a list of the contents of the resource directory, ala
- ``os.listdir()``. Requesting the contents of a non-existent directory may
- raise an exception.
-
-Note, by the way, that your provider classes need not (and should not) subclass
-``IResourceProvider`` or ``IMetadataProvider``! These classes exist solely
-for documentation purposes and do not provide any useful implementation code.
-You may instead wish to subclass one of the `built-in resource providers`_.
-
-
-Built-in Resource Providers
----------------------------
-
-``pkg_resources`` includes several provider classes that are automatically used
-where appropriate. Their inheritance tree looks like this::
-
- NullProvider
- EggProvider
- DefaultProvider
- PathMetadata
- ZipProvider
- EggMetadata
- EmptyProvider
- FileMetadata
-
-
-``NullProvider``
- This provider class is just an abstract base that provides for common
- provider behaviors (such as running scripts), given a definition for just
- a few abstract methods.
-
-``EggProvider``
- This provider class adds in some egg-specific features that are common
- to zipped and unzipped eggs.
-
-``DefaultProvider``
- This provider class is used for unpacked eggs and "plain old Python"
- filesystem modules.
-
-``ZipProvider``
- This provider class is used for all zipped modules, whether they are eggs
- or not.
-
-``EmptyProvider``
- This provider class always returns answers consistent with a provider that
- has no metadata or resources. ``Distribution`` objects created without
- a ``metadata`` argument use an instance of this provider class instead.
- Since all ``EmptyProvider`` instances are equivalent, there is no need
- to have more than one instance. ``pkg_resources`` therefore creates a
- global instance of this class under the name ``empty_provider``, and you
- may use it if you have need of an ``EmptyProvider`` instance.
-
-``PathMetadata(path, egg_info)``
- Create an ``IResourceProvider`` for a filesystem-based distribution, where
- `path` is the filesystem location of the importable modules, and `egg_info`
- is the filesystem location of the distribution's metadata directory.
- `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
- "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
- a "development egg". However, other uses are possible for custom purposes.
-
-``EggMetadata(zipimporter)``
- Create an ``IResourceProvider`` for a zipfile-based distribution. The
- `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
- represent a "basket" (a zipfile containing multiple ".egg" subdirectories),
- a specific egg *within* a basket, or a zipfile egg (where the zipfile
- itself is a ".egg"). It can also be a combination, such as a zipfile egg
- that also contains other eggs.
-
-``FileMetadata(path_to_pkg_info)``
- Create an ``IResourceProvider`` that provides exactly one metadata
- resource: ``PKG-INFO``. The supplied path should be a distutils PKG-INFO
- file. This is basically the same as an ``EmptyProvider``, except that
- requests for ``PKG-INFO`` will be answered using the contents of the
- designated file. (This provider is used to wrap ``.egg-info`` files
- installed by vendor-supplied system packages.)
-
-
-Utility Functions
-=================
-
-In addition to its high-level APIs, ``pkg_resources`` also includes several
-generally-useful utility routines. These routines are used to implement the
-high-level APIs, but can also be quite useful by themselves.
-
-
-Parsing Utilities
------------------
-
-``parse_version(version)``
- Parse a project's version string as defined by PEP 440. The returned
- value will be an object that represents the version. These objects may
- be compared to each other and sorted. The sorting algorithm is as defined
- by PEP 440 with the addition that any version which is not a valid PEP 440
- version will be considered less than any valid PEP 440 version and the
- invalid versions will continue sorting using the original algorithm.
-
-.. _yield_lines():
-
-``yield_lines(strs)``
- Yield non-empty/non-comment lines from a string/unicode or a possibly-
- nested sequence thereof. If `strs` is an instance of ``basestring``, it
- is split into lines, and each non-blank, non-comment line is yielded after
- stripping leading and trailing whitespace. (Lines whose first non-blank
- character is ``#`` are considered comment lines.)
-
- If `strs` is not an instance of ``basestring``, it is iterated over, and
- each item is passed recursively to ``yield_lines()``, so that an arbitrarily
- nested sequence of strings, or sequences of sequences of strings can be
- flattened out to the lines contained therein. So for example, passing
- a file object or a list of strings to ``yield_lines`` will both work.
- (Note that between each string in a sequence of strings there is assumed to
- be an implicit line break, so lines cannot bridge two strings in a
- sequence.)
-
- This routine is used extensively by ``pkg_resources`` to parse metadata
- and file formats of various kinds, and most other ``pkg_resources``
- parsing functions that yield multiple values will use it to break up their
- input. However, this routine is idempotent, so calling ``yield_lines()``
- on the output of another call to ``yield_lines()`` is completely harmless.
-
-``split_sections(strs)``
- Split a string (or possibly-nested iterable thereof), yielding ``(section,
- content)`` pairs found using an ``.ini``-like syntax. Each ``section`` is
- a whitespace-stripped version of the section name ("``[section]``")
- and each ``content`` is a list of stripped lines excluding blank lines and
- comment-only lines. If there are any non-blank, non-comment lines before
- the first section header, they're yielded in a first ``section`` of
- ``None``.
-
- This routine uses ``yield_lines()`` as its front end, so you can pass in
- anything that ``yield_lines()`` accepts, such as an open text file, string,
- or sequence of strings. ``ValueError`` is raised if a malformed section
- header is found (i.e. a line starting with ``[`` but not ending with
- ``]``).
-
- Note that this simplistic parser assumes that any line whose first nonblank
- character is ``[`` is a section heading, so it can't support .ini format
- variations that allow ``[`` as the first nonblank character on other lines.
-
-``safe_name(name)``
- Return a "safe" form of a project's name, suitable for use in a
- ``Requirement`` string, as a distribution name, or a PyPI project name.
- All non-alphanumeric runs are condensed to single "-" characters, such that
- a name like "The $$$ Tree" becomes "The-Tree". Note that if you are
- generating a filename from this value you should combine it with a call to
- ``to_filename()`` so all dashes ("-") are replaced by underscores ("_").
- See ``to_filename()``.
-
-``safe_version(version)``
- This will return the normalized form of any PEP 440 version, if the version
- string is not PEP 440 compatible then it is similar to ``safe_name()``
- except that spaces in the input become dots, and dots are allowed to exist
- in the output. As with ``safe_name()``, if you are generating a filename
- from this you should replace any "-" characters in the output with
- underscores.
-
-``safe_extra(extra)``
- Return a "safe" form of an extra's name, suitable for use in a requirement
- string or a setup script's ``extras_require`` keyword. This routine is
- similar to ``safe_name()`` except that non-alphanumeric runs are replaced
- by a single underbar (``_``), and the result is lowercased.
-
-``to_filename(name_or_version)``
- Escape a name or version string so it can be used in a dash-separated
- filename (or ``#egg=name-version`` tag) without ambiguity. You
- should only pass in values that were returned by ``safe_name()`` or
- ``safe_version()``.
-
-
-Platform Utilities
-------------------
-
-``get_build_platform()``
- Return this platform's identifier string. For Windows, the return value
- is ``"win32"``, and for Mac OS X it is a string of the form
- ``"macosx-10.4-ppc"``. All other platforms return the same uname-based
- string that the ``distutils.util.get_platform()`` function returns.
- This string is the minimum platform version required by distributions built
- on the local machine. (Backward compatibility note: setuptools versions
- prior to 0.6b1 called this function ``get_platform()``, and the function is
- still available under that name for backward compatibility reasons.)
-
-``get_supported_platform()`` (New in 0.6b1)
- This is similar to ``get_build_platform()``, but is the maximum
- platform version that the local machine supports. You will usually want
- to use this value as the ``provided`` argument to the
- ``compatible_platforms()`` function.
-
-``compatible_platforms(provided, required)``
- Return true if a distribution built on the `provided` platform may be used
- on the `required` platform. If either platform value is ``None``, it is
- considered a wildcard, and the platforms are therefore compatible.
- Likewise, if the platform strings are equal, they're also considered
- compatible, and ``True`` is returned. Currently, the only non-equal
- platform strings that are considered compatible are Mac OS X platform
- strings with the same hardware type (e.g. ``ppc``) and major version
- (e.g. ``10``) with the `provided` platform's minor version being less than
- or equal to the `required` platform's minor version.
-
-``get_default_cache()``
- Determine the default cache location for extracting resources from zipped
- eggs. This routine returns the ``PYTHON_EGG_CACHE`` environment variable,
- if set. Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of
- the user's "Application Data" directory. On all other systems, it returns
- ``os.path.expanduser("~/.python-eggs")`` if ``PYTHON_EGG_CACHE`` is not
- set.
-
-
-PEP 302 Utilities
------------------
-
-``get_importer(path_item)``
- Retrieve a PEP 302 "importer" for the given path item (which need not
- actually be on ``sys.path``). This routine simulates the PEP 302 protocol
- for obtaining an "importer" object. It first checks for an importer for
- the path item in ``sys.path_importer_cache``, and if not found it calls
- each of the ``sys.path_hooks`` and caches the result if a good importer is
- found. If no importer is found, this routine returns an ``ImpWrapper``
- instance that wraps the builtin import machinery as a PEP 302-compliant
- "importer" object. This ``ImpWrapper`` is *not* cached; instead a new
- instance is returned each time.
-
- (Note: When run under Python 2.5, this function is simply an alias for
- ``pkgutil.get_importer()``, and instead of ``pkg_resources.ImpWrapper``
- instances, it may return ``pkgutil.ImpImporter`` instances.)
-
-
-File/Path Utilities
--------------------
-
-``ensure_directory(path)``
- Ensure that the parent directory (``os.path.dirname``) of `path` actually
- exists, using ``os.makedirs()`` if necessary.
-
-``normalize_path(path)``
- Return a "normalized" version of `path`, such that two paths represent
- the same filesystem location if they have equal ``normalized_path()``
- values. Specifically, this is a shortcut for calling ``os.path.realpath``
- and ``os.path.normcase`` on `path`. Unfortunately, on certain platforms
- (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately
- reflect the platform's case-sensitivity, so there is always the possibility
- of two apparently-different paths being equal on such platforms.
-
-History
--------
-
-0.6c9
- * Fix ``resource_listdir('')`` always returning an empty list for zipped eggs.
-
-0.6c7
- * Fix package precedence problem where single-version eggs installed in
- ``site-packages`` would take precedence over ``.egg`` files (or directories)
- installed in ``site-packages``.
-
-0.6c6
- * Fix extracted C extensions not having executable permissions under Cygwin.
-
- * Allow ``.egg-link`` files to contain relative paths.
-
- * Fix cache dir defaults on Windows when multiple environment vars are needed
- to construct a path.
-
-0.6c4
- * Fix "dev" versions being considered newer than release candidates.
-
-0.6c3
- * Python 2.5 compatibility fixes.
-
-0.6c2
- * Fix a problem with eggs specified directly on ``PYTHONPATH`` on
- case-insensitive filesystems possibly not showing up in the default
- working set, due to differing normalizations of ``sys.path`` entries.
-
-0.6b3
- * Fixed a duplicate path insertion problem on case-insensitive filesystems.
-
-0.6b1
- * Split ``get_platform()`` into ``get_supported_platform()`` and
- ``get_build_platform()`` to work around a Mac versioning problem that caused
- the behavior of ``compatible_platforms()`` to be platform specific.
-
- * Fix entry point parsing when a standalone module name has whitespace
- between it and the extras.
-
-0.6a11
- * Added ``ExtractionError`` and ``ResourceManager.extraction_error()`` so that
- cache permission problems get a more user-friendly explanation of the
- problem, and so that programs can catch and handle extraction errors if they
- need to.
-
-0.6a10
- * Added the ``extras`` attribute to ``Distribution``, the ``find_plugins()``
- method to ``WorkingSet``, and the ``__add__()`` and ``__iadd__()`` methods
- to ``Environment``.
-
- * ``safe_name()`` now allows dots in project names.
-
- * There is a new ``to_filename()`` function that escapes project names and
- versions for safe use in constructing egg filenames from a Distribution
- object's metadata.
-
- * Added ``Distribution.clone()`` method, and keyword argument support to other
- ``Distribution`` constructors.
-
- * Added the ``DEVELOP_DIST`` precedence, and automatically assign it to
- eggs using ``.egg-info`` format.
-
-0.6a9
- * Don't raise an error when an invalid (unfinished) distribution is found
- unless absolutely necessary. Warn about skipping invalid/unfinished eggs
- when building an Environment.
-
- * Added support for ``.egg-info`` files or directories with version/platform
- information embedded in the filename, so that system packagers have the
- option of including ``PKG-INFO`` files to indicate the presence of a
- system-installed egg, without needing to use ``.egg`` directories, zipfiles,
- or ``.pth`` manipulation.
-
- * Changed ``parse_version()`` to remove dashes before pre-release tags, so
- that ``0.2-rc1`` is considered an *older* version than ``0.2``, and is equal
- to ``0.2rc1``. The idea that a dash *always* meant a post-release version
- was highly non-intuitive to setuptools users and Python developers, who
- seem to want to use ``-rc`` version numbers a lot.
-
-0.6a8
- * Fixed a problem with ``WorkingSet.resolve()`` that prevented version
- conflicts from being detected at runtime.
-
- * Improved runtime conflict warning message to identify a line in the user's
- program, rather than flagging the ``warn()`` call in ``pkg_resources``.
-
- * Avoid giving runtime conflict warnings for namespace packages, even if they
- were declared by a different package than the one currently being activated.
-
- * Fix path insertion algorithm for case-insensitive filesystems.
-
- * Fixed a problem with nested namespace packages (e.g. ``peak.util``) not
- being set as an attribute of their parent package.
-
-0.6a6
- * Activated distributions are now inserted in ``sys.path`` (and the working
- set) just before the directory that contains them, instead of at the end.
- This allows e.g. eggs in ``site-packages`` to override unmanaged modules in
- the same location, and allows eggs found earlier on ``sys.path`` to override
- ones found later.
-
- * When a distribution is activated, it now checks whether any contained
- non-namespace modules have already been imported and issues a warning if
- a conflicting module has already been imported.
-
- * Changed dependency processing so that it's breadth-first, allowing a
- depender's preferences to override those of a dependee, to prevent conflicts
- when a lower version is acceptable to the dependee, but not the depender.
-
- * Fixed a problem extracting zipped files on Windows, when the egg in question
- has had changed contents but still has the same version number.
-
-0.6a4
- * Fix a bug in ``WorkingSet.resolve()`` that was introduced in 0.6a3.
-
-0.6a3
- * Added ``safe_extra()`` parsing utility routine, and use it for Requirement,
- EntryPoint, and Distribution objects' extras handling.
-
-0.6a1
- * Enhanced performance of ``require()`` and related operations when all
- requirements are already in the working set, and enhanced performance of
- directory scanning for distributions.
-
- * Fixed some problems using ``pkg_resources`` w/PEP 302 loaders other than
- ``zipimport``, and the previously-broken "eager resource" support.
-
- * Fixed ``pkg_resources.resource_exists()`` not working correctly, along with
- some other resource API bugs.
-
- * Many API changes and enhancements:
-
- * Added ``EntryPoint``, ``get_entry_map``, ``load_entry_point``, and
- ``get_entry_info`` APIs for dynamic plugin discovery.
-
- * ``list_resources`` is now ``resource_listdir`` (and it actually works)
-
- * Resource API functions like ``resource_string()`` that accepted a package
- name and resource name, will now also accept a ``Requirement`` object in
- place of the package name (to allow access to non-package data files in
- an egg).
-
- * ``get_provider()`` will now accept a ``Requirement`` instance or a module
- name. If it is given a ``Requirement``, it will return a corresponding
- ``Distribution`` (by calling ``require()`` if a suitable distribution
- isn't already in the working set), rather than returning a metadata and
- resource provider for a specific module. (The difference is in how
- resource paths are interpreted; supplying a module name means resources
- path will be module-relative, rather than relative to the distribution's
- root.)
-
- * ``Distribution`` objects now implement the ``IResourceProvider`` and
- ``IMetadataProvider`` interfaces, so you don't need to reference the (no
- longer available) ``metadata`` attribute to get at these interfaces.
-
- * ``Distribution`` and ``Requirement`` both have a ``project_name``
- attribute for the project name they refer to. (Previously these were
- ``name`` and ``distname`` attributes.)
-
- * The ``path`` attribute of ``Distribution`` objects is now ``location``,
- because it isn't necessarily a filesystem path (and hasn't been for some
- time now). The ``location`` of ``Distribution`` objects in the filesystem
- should always be normalized using ``pkg_resources.normalize_path()``; all
- of the setuptools and EasyInstall code that generates distributions from
- the filesystem (including ``Distribution.from_filename()``) ensure this
- invariant, but if you use a more generic API like ``Distribution()`` or
- ``Distribution.from_location()`` you should take care that you don't
- create a distribution with an un-normalized filesystem path.
-
- * ``Distribution`` objects now have an ``as_requirement()`` method that
- returns a ``Requirement`` for the distribution's project name and version.
-
- * Distribution objects no longer have an ``installed_on()`` method, and the
- ``install_on()`` method is now ``activate()`` (but may go away altogether
- soon). The ``depends()`` method has also been renamed to ``requires()``,
- and ``InvalidOption`` is now ``UnknownExtra``.
-
- * ``find_distributions()`` now takes an additional argument called ``only``,
- that tells it to only yield distributions whose location is the passed-in
- path. (It defaults to False, so that the default behavior is unchanged.)
-
- * ``AvailableDistributions`` is now called ``Environment``, and the
- ``get()``, ``__len__()``, and ``__contains__()`` methods were removed,
- because they weren't particularly useful. ``__getitem__()`` no longer
- raises ``KeyError``; it just returns an empty list if there are no
- distributions for the named project.
-
- * The ``resolve()`` method of ``Environment`` is now a method of
- ``WorkingSet`` instead, and the ``best_match()`` method now uses a working
- set instead of a path list as its second argument.
-
- * There is a new ``pkg_resources.add_activation_listener()`` API that lets
- you register a callback for notifications about distributions added to
- ``sys.path`` (including the distributions already on it). This is
- basically a hook for extensible applications and frameworks to be able to
- search for plugin metadata in distributions added at runtime.
-
-0.5a13
- * Fixed a bug in resource extraction from nested packages in a zipped egg.
-
-0.5a12
- * Updated extraction/cache mechanism for zipped resources to avoid inter-
- process and inter-thread races during extraction. The default cache
- location can now be set via the ``PYTHON_EGG_CACHE`` environment variable,
- and the default Windows cache is now a ``Python-Eggs`` subdirectory of the
- current user's "Application Data" directory, if the ``PYTHON_EGG_CACHE``
- variable isn't set.
-
-0.5a10
- * Fix a problem with ``pkg_resources`` being confused by non-existent eggs on
- ``sys.path`` (e.g. if a user deletes an egg without removing it from the
- ``easy-install.pth`` file).
-
- * Fix a problem with "basket" support in ``pkg_resources``, where egg-finding
- never actually went inside ``.egg`` files.
-
- * Made ``pkg_resources`` import the module you request resources from, if it's
- not already imported.
-
-0.5a4
- * ``pkg_resources.AvailableDistributions.resolve()`` and related methods now
- accept an ``installer`` argument: a callable taking one argument, a
- ``Requirement`` instance. The callable must return a ``Distribution``
- object, or ``None`` if no distribution is found. This feature is used by
- EasyInstall to resolve dependencies by recursively invoking itself.
-
-0.4a4
- * Fix problems with ``resource_listdir()``, ``resource_isdir()`` and resource
- directory extraction for zipped eggs.
-
-0.4a3
- * Fixed scripts not being able to see a ``__file__`` variable in ``__main__``
-
- * Fixed a problem with ``resource_isdir()`` implementation that was introduced
- in 0.4a2.
-
-0.4a1
- * Fixed a bug in requirements processing for exact versions (i.e. ``==`` and
- ``!=``) when only one condition was included.
-
- * Added ``safe_name()`` and ``safe_version()`` APIs to clean up handling of
- arbitrary distribution names and versions found on PyPI.
-
-0.3a4
- * ``pkg_resources`` now supports resource directories, not just the resources
- in them. In particular, there are ``resource_listdir()`` and
- ``resource_isdir()`` APIs.
-
- * ``pkg_resources`` now supports "egg baskets" -- .egg zipfiles which contain
- multiple distributions in subdirectories whose names end with ``.egg``.
- Having such a "basket" in a directory on ``sys.path`` is equivalent to
- having the individual eggs in that directory, but the contained eggs can
- be individually added (or not) to ``sys.path``. Currently, however, there
- is no automated way to create baskets.
-
- * Namespace package manipulation is now protected by the Python import lock.
-
-0.3a1
- * Initial release.
-
diff --git a/docs/python3.txt b/docs/python3.txt
deleted file mode 100644
index d550cb68..00000000
--- a/docs/python3.txt
+++ /dev/null
@@ -1,94 +0,0 @@
-=====================================================
-Supporting both Python 2 and Python 3 with Setuptools
-=====================================================
-
-Starting with Distribute version 0.6.2 and Setuptools 0.7, the Setuptools
-project supported Python 3. Installing and
-using setuptools for Python 3 code works exactly the same as for Python 2
-code.
-
-Setuptools provides a facility to invoke 2to3 on the code as a part of the
-build process, by setting the keyword parameter ``use_2to3`` to True, but
-the Setuptools strongly recommends instead developing a unified codebase
-using `six <https://pypi.python.org/pypi/six>`_,
-`future <https://pypi.python.org/pypi/future>`_, or another compatibility
-library.
-
-
-Using 2to3
-==========
-
-Setuptools attempts to make the porting process easier by automatically
-running
-2to3 as a part of running tests. To do so, you need to configure the
-setup.py so that you can run the unit tests with ``python setup.py test``.
-
-See :ref:`test` for more information on this.
-
-Once you have the tests running under Python 2, you can add the use_2to3
-keyword parameters to setup(), and start running the tests under Python 3.
-The test command will now first run the build command during which the code
-will be converted with 2to3, and the tests will then be run from the build
-directory, as opposed from the source directory as is normally done.
-
-Setuptools will convert all Python files, and also all doctests in Python
-files. However, if you have doctests located in separate text files, these
-will not automatically be converted. By adding them to the
-``convert_2to3_doctests`` keyword parameter Setuptools will convert them as
-well.
-
-By default, the conversion uses all fixers in the ``lib2to3.fixers`` package.
-To use additional fixers, the parameter ``use_2to3_fixers`` can be set
-to a list of names of packages containing fixers. To exclude fixers, the
-parameter ``use_2to3_exclude_fixers`` can be set to fixer names to be
-skipped.
-
-An example setup.py might look something like this::
-
- from setuptools import setup
-
- setup(
- name='your.module',
- version='1.0',
- description='This is your awesome module',
- author='You',
- author_email='your@email',
- package_dir={'': 'src'},
- packages=['your', 'you.module'],
- test_suite='your.module.tests',
- use_2to3=True,
- convert_2to3_doctests=['src/your/module/README.txt'],
- use_2to3_fixers=['your.fixers'],
- use_2to3_exclude_fixers=['lib2to3.fixes.fix_import'],
- )
-
-Differential conversion
------------------------
-
-Note that a file will only be copied and converted during the build process
-if the source file has been changed. If you add a file to the doctests
-that should be converted, it will not be converted the next time you run
-the tests, since it hasn't been modified. You need to remove it from the
-build directory. Also if you run the build, install or test commands before
-adding the use_2to3 parameter, you will have to remove the build directory
-before you run the test command, as the files otherwise will seem updated,
-and no conversion will happen.
-
-In general, if code doesn't seem to be converted, deleting the build directory
-and trying again is a good safeguard against the build directory getting
-"out of sync" with the source directory.
-
-Distributing Python 3 modules
-=============================
-
-You can distribute your modules with Python 3 support in different ways. A
-normal source distribution will work, but can be slow in installing, as the
-2to3 process will be run during the install. But you can also distribute
-the module in binary format, such as a binary egg. That egg will contain the
-already converted code, and hence no 2to3 conversion is needed during install.
-
-Advanced features
-=================
-
-If you don't want to run the 2to3 conversion on the doctests in Python files,
-you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``.
diff --git a/docs/releases.txt b/docs/releases.txt
deleted file mode 100644
index 3f29334a..00000000
--- a/docs/releases.txt
+++ /dev/null
@@ -1,56 +0,0 @@
-===============
-Release Process
-===============
-
-In order to allow for rapid, predictable releases, Setuptools uses a
-mechanical technique for releases, enacted by Travis following a
-successful build of a tagged release per
-`PyPI deployment <https://docs.travis-ci.com/user/deployment/pypi>`_.
-
-To cut a release, install and run ``bumpversion {part}`` where ``part``
-is major, minor, or patch based on the scope of the changes in the
-release. Then, push the commits to the master branch. If tests pass,
-the release will be uploaded to PyPI.
-
-Bootstrap Bookmark
-------------------
-
-Setuptools has a bootstrap script (ez_setup.py) which is hosted in the
-repository and must be updated with each release (to bump the default version).
-The "published" version of the script is the one indicated by the ``bootstrap``
-branch.
-
-Therefore, the latest bootstrap script can be retrieved by checking out the
-repository at that bookmark. It's also possible to get the bootstrap script for
-any particular release by grabbing the script from that tagged release.
-
-The officially-published location of the bootstrap script is hosted on Python
-infrastructure (#python-infra on freenode) at https://bootstrap.pypa.io and
-is updated every fifteen minutes from the bootstrap script. Sometimes,
-especially when the bootstrap script is rolled back, this
-process doesn't work as expected and requires manual intervention.
-
-Release Frequency
------------------
-
-Some have asked why Setuptools is released so frequently. Because Setuptools
-uses a mechanical release process, it's very easy to make releases whenever the
-code is stable (tests are passing). As a result, the philosophy is to release
-early and often.
-
-While some find the frequent releases somewhat surprising, they only empower
-the user. Although releases are made frequently, users can choose the frequency
-at which they use those releases. If instead Setuptools contributions were only
-released in batches, the user would be constrained to only use Setuptools when
-those official releases were made. With frequent releases, the user can govern
-exactly how often he wishes to update.
-
-Frequent releases also then obviate the need for dev or beta releases in most
-cases. Because releases are made early and often, bugs are discovered and
-corrected quickly, in many cases before other users have yet to encounter them.
-
-Release Managers
-----------------
-
-Additionally, anyone with push access to the master branch has access to cut
-releases.
diff --git a/docs/roadmap.txt b/docs/roadmap.txt
deleted file mode 100644
index 8f175b9f..00000000
--- a/docs/roadmap.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-=======
-Roadmap
-=======
-
-Setuptools is primarily in maintenance mode. The project attempts to address
-user issues, concerns, and feature requests in a timely fashion.
diff --git a/docs/setuptools.txt b/docs/setuptools.txt
deleted file mode 100644
index 57818281..00000000
--- a/docs/setuptools.txt
+++ /dev/null
@@ -1,2663 +0,0 @@
-==================================================
-Building and Distributing Packages with Setuptools
-==================================================
-
-``Setuptools`` is a collection of enhancements to the Python ``distutils``
-(for Python 2.6 and up) that allow developers to more easily build and
-distribute Python packages, especially ones that have dependencies on other
-packages.
-
-Packages built and distributed using ``setuptools`` look to the user like
-ordinary Python packages based on the ``distutils``. Your users don't need to
-install or even know about setuptools in order to use them, and you don't
-have to include the entire setuptools package in your distributions. By
-including just a single `bootstrap module`_ (a 12K .py file), your package will
-automatically download and install ``setuptools`` if the user is building your
-package from source and doesn't have a suitable version already installed.
-
-.. _bootstrap module: https://bootstrap.pypa.io/ez_setup.py
-
-Feature Highlights:
-
-* Automatically find/download/install/upgrade dependencies at build time using
- the `EasyInstall tool <easy_install.html>`_,
- which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
- automatically scans web pages linked from PyPI to find download links. (It's
- the closest thing to CPAN currently available for Python.)
-
-* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
- a single-file importable distribution format
-
-* Enhanced support for accessing data files hosted in zipped packages.
-
-* Automatically include all packages in your source tree, without listing them
- individually in setup.py
-
-* Automatically include all relevant files in your source distributions,
- without needing to create a ``MANIFEST.in`` file, and without having to force
- regeneration of the ``MANIFEST`` file when your source tree changes.
-
-* Automatically generate wrapper scripts or Windows (console and GUI) .exe
- files for any number of "main" functions in your project. (Note: this is not
- a py2exe replacement; the .exe files rely on the local Python installation.)
-
-* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
- still work even when the end-user doesn't have Pyrex installed (as long as
- you include the Pyrex-generated C in your source distribution)
-
-* Command aliases - create project-specific, per-user, or site-wide shortcut
- names for commonly used commands and options
-
-* PyPI upload support - upload your source distributions and eggs to PyPI
-
-* Deploy your project in "development mode", such that it's available on
- ``sys.path``, yet can still be edited directly from its source checkout.
-
-* Easily extend the distutils with new commands or ``setup()`` arguments, and
- distribute/reuse your extensions for multiple projects, without copying code.
-
-* Create extensible applications and frameworks that automatically discover
- extensions, using simple "entry points" declared in a project's setup script.
-
-In addition to the PyPI downloads, the development version of ``setuptools``
-is available from the `Python SVN sandbox`_, and in-development versions of the
-`0.6 branch`_ are available as well.
-
-.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
-
-.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
-
-.. contents:: **Table of Contents**
-
-.. _ez_setup.py: `bootstrap module`_
-
-
------------------
-Developer's Guide
------------------
-
-
-Installing ``setuptools``
-=========================
-
-Please follow the `EasyInstall Installation Instructions`_ to install the
-current stable version of setuptools. In particular, be sure to read the
-section on `Custom Installation Locations`_ if you are installing anywhere
-other than Python's ``site-packages`` directory.
-
-.. _EasyInstall Installation Instructions: easy_install.html#installation-instructions
-
-.. _Custom Installation Locations: easy_install.html#custom-installation-locations
-
-If you want the current in-development version of setuptools, you should first
-install a stable version, and then run::
-
- ez_setup.py setuptools==dev
-
-This will download and install the latest development (i.e. unstable) version
-of setuptools from the Python Subversion sandbox.
-
-
-Basic Use
-=========
-
-For basic use of setuptools, just import things from setuptools instead of
-the distutils. Here's a minimal setup script using setuptools::
-
- from setuptools import setup, find_packages
- setup(
- name = "HelloWorld",
- version = "0.1",
- packages = find_packages(),
- )
-
-As you can see, it doesn't take much to use setuptools in a project.
-Run that script in your project folder, alongside the Python packages
-you have developed.
-
-Invoke that script to produce eggs, upload to
-PyPI, and automatically include all packages in the directory where the
-setup.py lives. See the `Command Reference`_ section below to see what
-commands you can give to this setup script. For example,
-to produce a source distribution, simply invoke::
-
- python setup.py sdist
-
-Of course, before you release your project to PyPI, you'll want to add a bit
-more information to your setup script to help people find or learn about your
-project. And maybe your project will have grown by then to include a few
-dependencies, and perhaps some data files and scripts::
-
- from setuptools import setup, find_packages
- setup(
- name = "HelloWorld",
- version = "0.1",
- packages = find_packages(),
- scripts = ['say_hello.py'],
-
- # Project uses reStructuredText, so ensure that the docutils get
- # installed or upgraded on the target machine
- install_requires = ['docutils>=0.3'],
-
- package_data = {
- # If any package contains *.txt or *.rst files, include them:
- '': ['*.txt', '*.rst'],
- # And include any *.msg files found in the 'hello' package, too:
- 'hello': ['*.msg'],
- },
-
- # metadata for upload to PyPI
- author = "Me",
- author_email = "me@example.com",
- description = "This is an Example Package",
- license = "PSF",
- keywords = "hello world example examples",
- url = "http://example.com/HelloWorld/", # project home page, if any
-
- # could also include long_description, download_url, classifiers, etc.
- )
-
-In the sections that follow, we'll explain what most of these ``setup()``
-arguments do (except for the metadata ones), and the various ways you might use
-them in your own project(s).
-
-
-Specifying Your Project's Version
----------------------------------
-
-Setuptools can work well with most versioning schemes; there are, however, a
-few special things to watch out for, in order to ensure that setuptools and
-EasyInstall can always tell what version of your package is newer than another
-version. Knowing these things will also help you correctly specify what
-versions of other projects your project depends on.
-
-A version consists of an alternating series of release numbers and pre-release
-or post-release tags. A release number is a series of digits punctuated by
-dots, such as ``2.4`` or ``0.5``. Each series of digits is treated
-numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
-same release number, denoting the first subrelease of release 2. But ``2.10``
-is the *tenth* subrelease of release 2, and so is a different and newer release
-from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also
-ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
-
-Following a release number, you can have either a pre-release or post-release
-tag. Pre-release tags make a version be considered *older* than the version
-they are appended to. So, revision ``2.4`` is *newer* than revision ``2.4c1``,
-which in turn is newer than ``2.4b1`` or ``2.4a1``. Postrelease tags make
-a version be considered *newer* than the version they are appended to. So,
-revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older*
-than ``2.4.1`` (which has a higher release number).
-
-A pre-release tag is a series of letters that are alphabetically before
-"final". Some examples of prerelease tags would include ``alpha``, ``beta``,
-``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash
-before the prerelease tag if it's immediately after a number, but it's okay to
-do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all
-represent release candidate 1 of version ``2.4``, and are treated as identical
-by setuptools.
-
-In addition, there are three special prerelease tags that are treated as if
-they were the letter ``c``: ``pre``, ``preview``, and ``rc``. So, version
-``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
-``2.4c1``, and are treated as identical by setuptools.
-
-A post-release tag is either a series of letters that are alphabetically
-greater than or equal to "final", or a dash (``-``). Post-release tags are
-generally used to separate patch numbers, port numbers, build numbers, revision
-numbers, or date stamps from the release number. For example, the version
-``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
-version ``2.4``. Or you might use ``2.4-20051127`` to denote a date-stamped
-post-release.
-
-Notice that after each pre or post-release tag, you are free to place another
-release number, followed again by more pre- or post-release tags. For example,
-``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in-
-development version of the ninth alpha of release 0.6. Notice that ``dev`` is
-a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
-which would be the actual ninth alpha of release 0.6. But the ``-r41475`` is
-a post-release tag, so this version is *newer* than ``0.6a9.dev``.
-
-For the most part, setuptools' interpretation of version numbers is intuitive,
-but here are a few tips that will keep you out of trouble in the corner cases:
-
-* Don't stick adjoining pre-release tags together without a dot or number
- between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
- *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in
- ``1.9a.dev``, or separate the prerelease tags with a number, as in
- ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
- identical versions from setuptools' point of view, so you can use whatever
- scheme you prefer.
-
-* If you want to be certain that your chosen numbering scheme works the way
- you think it will, you can use the ``pkg_resources.parse_version()`` function
- to compare different version numbers::
-
- >>> from pkg_resources import parse_version
- >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
- True
- >>> parse_version('2.1-rc2') < parse_version('2.1')
- True
- >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
- True
-
-Once you've decided on a version numbering scheme for your project, you can
-have setuptools automatically tag your in-development releases with various
-pre- or post-release tags. See the following sections for more details:
-
-* `Tagging and "Daily Build" or "Snapshot" Releases`_
-* `Managing "Continuous Releases" Using Subversion`_
-* The `egg_info`_ command
-
-
-New and Changed ``setup()`` Keywords
-====================================
-
-The following keyword arguments to ``setup()`` are added or changed by
-``setuptools``. All of them are optional; you do not have to supply them
-unless you need the associated ``setuptools`` feature.
-
-``include_package_data``
- If set to ``True``, this tells ``setuptools`` to automatically include any
- data files it finds inside your package directories that are specified by
- your ``MANIFEST.in`` file. For more information, see the section below on
- `Including Data Files`_.
-
-``exclude_package_data``
- A dictionary mapping package names to lists of glob patterns that should
- be *excluded* from your package directories. You can use this to trim back
- any excess files included by ``include_package_data``. For a complete
- description and examples, see the section below on `Including Data Files`_.
-
-``package_data``
- A dictionary mapping package names to lists of glob patterns. For a
- complete description and examples, see the section below on `Including
- Data Files`_. You do not need to use this option if you are using
- ``include_package_data``, unless you need to add e.g. files that are
- generated by your setup script and build process. (And are therefore not
- in source control or are files that you don't want to include in your
- source distribution.)
-
-``zip_safe``
- A boolean (True or False) flag specifying whether the project can be
- safely installed and run from a zip file. If this argument is not
- supplied, the ``bdist_egg`` command will have to analyze all of your
- project's contents for possible problems each time it builds an egg.
-
-``install_requires``
- A string or list of strings specifying what other distributions need to
- be installed when this one is. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
-
-``entry_points``
- A dictionary mapping entry point group names to strings or lists of strings
- defining the entry points. Entry points are used to support dynamic
- discovery of services or plugins provided by a project. See `Dynamic
- Discovery of Services and Plugins`_ for details and examples of the format
- of this argument. In addition, this keyword is used to support `Automatic
- Script Creation`_.
-
-``extras_require``
- A dictionary mapping names of "extras" (optional features of your project)
- to strings or lists of strings specifying what other distributions must be
- installed to support those features. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
-
-``setup_requires``
- A string or list of strings specifying what other distributions need to
- be present in order for the *setup script* to run. ``setuptools`` will
- attempt to obtain these (even going so far as to download them using
- ``EasyInstall``) before processing the rest of the setup script or commands.
- This argument is needed if you are using distutils extensions as part of
- your build process; for example, extensions that process setup() arguments
- and turn them into EGG-INFO metadata files.
-
- (Note: projects listed in ``setup_requires`` will NOT be automatically
- installed on the system where the setup script is being run. They are
- simply downloaded to the ./.eggs directory if they're not locally available
- already. If you want them to be installed, as well as being available
- when the setup script is run, you should add them to ``install_requires``
- **and** ``setup_requires``.)
-
-``dependency_links``
- A list of strings naming URLs to be searched when satisfying dependencies.
- These links will be used if needed to install packages specified by
- ``setup_requires`` or ``tests_require``. They will also be written into
- the egg's metadata for use by tools like EasyInstall to use when installing
- an ``.egg`` file.
-
-``namespace_packages``
- A list of strings naming the project's "namespace packages". A namespace
- package is a package that may be split across multiple project
- distributions. For example, Zope 3's ``zope`` package is a namespace
- package, because subpackages like ``zope.interface`` and ``zope.publisher``
- may be distributed separately. The egg runtime system can automatically
- merge such subpackages into a single parent package at runtime, as long
- as you declare them in each project that contains any subpackages of the
- namespace package, and as long as the namespace package's ``__init__.py``
- does not contain any code other than a namespace declaration. See the
- section below on `Namespace Packages`_ for more information.
-
-``test_suite``
- A string naming a ``unittest.TestCase`` subclass (or a package or module
- containing one or more of them, or a method of such a subclass), or naming
- a function that can be called with no arguments and returns a
- ``unittest.TestSuite``. If the named suite is a module, and the module
- has an ``additional_tests()`` function, it is called and the results are
- added to the tests to be run. If the named suite is a package, any
- submodules and subpackages are recursively added to the overall test suite.
-
- Specifying this argument enables use of the `test`_ command to run the
- specified test suite, e.g. via ``setup.py test``. See the section on the
- `test`_ command below for more details.
-
-``tests_require``
- If your project's tests need one or more additional packages besides those
- needed to install it, you can use this option to specify them. It should
- be a string or list of strings specifying what other distributions need to
- be present for the package's tests to run. When you run the ``test``
- command, ``setuptools`` will attempt to obtain these (even going
- so far as to download them using ``EasyInstall``). Note that these
- required projects will *not* be installed on the system where the tests
- are run, but only downloaded to the project's setup directory if they're
- not already installed locally.
-
-.. _test_loader:
-
-``test_loader``
- If you would like to use a different way of finding tests to run than what
- setuptools normally uses, you can specify a module name and class name in
- this argument. The named class must be instantiable with no arguments, and
- its instances must support the ``loadTestsFromNames()`` method as defined
- in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
- pass only one test "name" in the `names` argument: the value supplied for
- the ``test_suite`` argument. The loader you specify may interpret this
- string in any way it likes, as there are no restrictions on what may be
- contained in a ``test_suite`` string.
-
- The module name and class name must be separated by a ``:``. The default
- value of this argument is ``"setuptools.command.test:ScanningLoader"``. If
- you want to use the default ``unittest`` behavior, you can specify
- ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This
- will prevent automatic scanning of submodules and subpackages.
-
- The module and class you specify here may be contained in another package,
- as long as you use the ``tests_require`` option to ensure that the package
- containing the loader class is available when the ``test`` command is run.
-
-``eager_resources``
- A list of strings naming resources that should be extracted together, if
- any of them is needed, or if any C extensions included in the project are
- imported. This argument is only useful if the project will be installed as
- a zipfile, and there is a need to have all of the listed resources be
- extracted to the filesystem *as a unit*. Resources listed here
- should be '/'-separated paths, relative to the source root, so to list a
- resource ``foo.png`` in package ``bar.baz``, you would include the string
- ``bar/baz/foo.png`` in this argument.
-
- If you only need to obtain resources one at a time, or you don't have any C
- extensions that access other files in the project (such as data files or
- shared libraries), you probably do NOT need this argument and shouldn't
- mess with it. For more details on how this argument works, see the section
- below on `Automatic Resource Extraction`_.
-
-``use_2to3``
- Convert the source code from Python 2 to Python 3 with 2to3 during the
- build process. See :doc:`python3` for more details.
-
-``convert_2to3_doctests``
- List of doctest source files that need to be converted with 2to3.
- See :doc:`python3` for more details.
-
-``use_2to3_fixers``
- A list of modules to search for additional fixers to be used during
- the 2to3 conversion. See :doc:`python3` for more details.
-
-
-Using ``find_packages()``
--------------------------
-
-For simple projects, it's usually easy enough to manually add packages to
-the ``packages`` argument of ``setup()``. However, for very large projects
-(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the
-package list updated. That's what ``setuptools.find_packages()`` is for.
-
-``find_packages()`` takes a source directory and two lists of package name
-patterns to exclude and include. If omitted, the source directory defaults to
-the same
-directory as the setup script. Some projects use a ``src`` or ``lib``
-directory as the root of their source tree, and those projects would of course
-use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``. (And
-such projects also need something like ``package_dir = {'':'src'}`` in their
-``setup()`` arguments, but that's just a normal distutils thing.)
-
-Anyway, ``find_packages()`` walks the target directory, filtering by inclusion
-patterns, and finds Python packages (any directory). On Python 3.2 and
-earlier, packages are only recognized if they include an ``__init__.py`` file.
-Finally, exclusion patterns are applied to remove matching packages.
-
-Inclusion and exclusion patterns are package names, optionally including
-wildcards. For
-example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose
-last name part is ``tests``. Or, ``find_packages(exclude=["*.tests",
-"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``,
-but it still won't exclude a top-level ``tests`` package or the children
-thereof. In fact, if you really want no ``tests`` packages at all, you'll need
-something like this::
-
- find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
-
-in order to cover all the bases. Really, the exclusion patterns are intended
-to cover simpler use cases than this, like excluding a single, specified
-package and its subpackages.
-
-Regardless of the parameters, the ``find_packages()``
-function returns a list of package names suitable for use as the ``packages``
-argument to ``setup()``, and so is usually the easiest way to set that
-argument in your setup script. Especially since it frees you from having to
-remember to modify your setup script whenever your project grows additional
-top-level packages or subpackages.
-
-
-Automatic Script Creation
-=========================
-
-Packaging and installing scripts can be a bit awkward with the distutils. For
-one thing, there's no easy way to have a script's filename match local
-conventions on both Windows and POSIX platforms. For another, you often have
-to create a separate file just for the "main" script, when your actual "main"
-is a function in a module somewhere. And even in Python 2.4, using the ``-m``
-option only works for actual ``.py`` files that aren't installed in a package.
-
-``setuptools`` fixes all of these problems by automatically generating scripts
-for you with the correct extension, and on Windows it will even create an
-``.exe`` file so that users don't have to change their ``PATHEXT`` settings.
-The way to use this feature is to define "entry points" in your setup script
-that indicate what function the generated script should import and run. For
-example, to create two console scripts called ``foo`` and ``bar``, and a GUI
-script called ``baz``, you might do something like this::
-
- setup(
- # other arguments here...
- entry_points={
- 'console_scripts': [
- 'foo = my_package.some_module:main_func',
- 'bar = other_module:some_func',
- ],
- 'gui_scripts': [
- 'baz = my_package_gui:start_func',
- ]
- }
- )
-
-When this project is installed on non-Windows platforms (using "setup.py
-install", "setup.py develop", or by using EasyInstall), a set of ``foo``,
-``bar``, and ``baz`` scripts will be installed that import ``main_func`` and
-``some_func`` from the specified modules. The functions you specify are called
-with no arguments, and their return value is passed to ``sys.exit()``, so you
-can return an errorlevel or message to print to stderr.
-
-On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are
-created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files. The
-``.exe`` wrappers find and execute the right version of Python to run the
-``.py`` or ``.pyw`` file.
-
-You may define as many "console script" and "gui script" entry points as you
-like, and each one can optionally specify "extras" that it depends on, that
-will be added to ``sys.path`` when the script is run. For more information on
-"extras", see the section below on `Declaring Extras`_. For more information
-on "entry points" in general, see the section below on `Dynamic Discovery of
-Services and Plugins`_.
-
-
-"Eggsecutable" Scripts
-----------------------
-
-Occasionally, there are situations where it's desirable to make an ``.egg``
-file directly executable. You can do this by including an entry point such
-as the following::
-
- setup(
- # other arguments here...
- entry_points = {
- 'setuptools.installation': [
- 'eggsecutable = my_package.some_module:main_func',
- ]
- }
- )
-
-Any eggs built from the above setup script will include a short executable
-prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
-The prelude can be run on Unix-like platforms (including Mac and Linux) by
-invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
-``.egg`` file. For the executable prelude to run, the appropriate version of
-Python must be available via the ``PATH`` environment variable, under its
-"long" name. That is, if the egg is built for Python 2.3, there must be a
-``python2.3`` executable present in a directory on ``PATH``.
-
-This feature is primarily intended to support ``ez_setup``'s installation of
-setuptools itself on non-Windows platforms, but may also be useful for other
-projects.
-
-IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
-invoked via symlinks. They *must* be invoked using their original filename, in
-order to ensure that, once running, ``pkg_resources`` will know what project
-and version is in use. The header script will check this and exit with an
-error if the ``.egg`` file has been renamed or is invoked via a symlink that
-changes its base name.
-
-
-Declaring Dependencies
-======================
-
-``setuptools`` supports automatically installing dependencies when a package is
-installed, and including information about dependencies in Python Eggs (so that
-package management tools like EasyInstall can use the information).
-
-``setuptools`` and ``pkg_resources`` use a common syntax for specifying a
-project's required dependencies. This syntax consists of a project's PyPI
-name, optionally followed by a comma-separated list of "extras" in square
-brackets, optionally followed by a comma-separated list of version
-specifiers. A version specifier is one of the operators ``<``, ``>``, ``<=``,
-``>=``, ``==`` or ``!=``, followed by a version identifier. Tokens may be
-separated by whitespace, but any whitespace or nonstandard characters within a
-project name or version identifier must be replaced with ``-``.
-
-Version specifiers for a given project are internally sorted into ascending
-version order, and used to establish what ranges of versions are acceptable.
-Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes
-``">1"``, and ``"<2,<3"`` becomes ``"<3"``). ``"!="`` versions are excised from
-the ranges they fall within. A project's version is then checked for
-membership in the resulting ranges. (Note that providing conflicting conditions
-for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may
-therefore produce bizarre results.)
-
-Here are some example requirement specifiers::
-
- docutils >= 0.3
-
- # comment lines and \ continuations are allowed in requirement strings
- BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \
- ==1.6, ==1.7 # and so are line-end comments
-
- PEAK[FastCGI, reST]>=0.5a4
-
- setuptools==0.5a7
-
-The simplest way to include requirement specifiers is to use the
-``install_requires`` argument to ``setup()``. It takes a string or list of
-strings containing requirement specifiers. If you include more than one
-requirement in a string, each requirement must begin on a new line.
-
-This has three effects:
-
-1. When your project is installed, either by using EasyInstall, ``setup.py
- install``, or ``setup.py develop``, all of the dependencies not already
- installed will be located (via PyPI), downloaded, built (if necessary),
- and installed.
-
-2. Any scripts in your project will be installed with wrappers that verify
- the availability of the specified dependencies at runtime, and ensure that
- the correct versions are added to ``sys.path`` (e.g. if multiple versions
- have been installed).
-
-3. Python Egg distributions will include a metadata file listing the
- dependencies.
-
-Note, by the way, that if you declare your dependencies in ``setup.py``, you do
-*not* need to use the ``require()`` function in your scripts or modules, as
-long as you either install the project or use ``setup.py develop`` to do
-development work on it. (See `"Development Mode"`_ below for more details on
-using ``setup.py develop``.)
-
-
-Dependencies that aren't in PyPI
---------------------------------
-
-If your project depends on packages that aren't registered in PyPI, you may
-still be able to depend on them, as long as they are available for download
-as:
-
-- an egg, in the standard distutils ``sdist`` format,
-- a single ``.py`` file, or
-- a VCS repository (Subversion, Mercurial, or Git).
-
-You just need to add some URLs to the ``dependency_links`` argument to
-``setup()``.
-
-The URLs must be either:
-
-1. direct download URLs,
-2. the URLs of web pages that contain direct download links, or
-3. the repository's URL
-
-In general, it's better to link to web pages, because it is usually less
-complex to update a web page than to release a new version of your project.
-You can also use a SourceForge ``showfiles.php`` link in the case where a
-package you depend on is distributed via SourceForge.
-
-If you depend on a package that's distributed as a single ``.py`` file, you
-must include an ``"#egg=project-version"`` suffix to the URL, to give a project
-name and version number. (Be sure to escape any dashes in the name or version
-by replacing them with underscores.) EasyInstall will recognize this suffix
-and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
-as an egg.
-
-In the case of a VCS checkout, you should also append ``#egg=project-version``
-in order to identify for what package that checkout should be used. You can
-append ``@REV`` to the URL's path (before the fragment) to specify a revision.
-Additionally, you can also force the VCS being used by prepending the URL with
-a certain prefix. Currently available are:
-
-- ``svn+URL`` for Subversion,
-- ``git+URL`` for Git, and
-- ``hg+URL`` for Mercurial
-
-A more complete example would be:
-
- ``vcs+proto://host/path@revision#egg=project-version``
-
-Be careful with the version. It should match the one inside the project files.
-If you want to disregard the version, you have to omit it both in the
-``requires`` and in the URL's fragment.
-
-This will do a checkout (or a clone, in Git and Mercurial parlance) to a
-temporary folder and run ``setup.py bdist_egg``.
-
-The ``dependency_links`` option takes the form of a list of URL strings. For
-example, the below will cause EasyInstall to search the specified page for
-eggs or source distributions, if the package's dependencies aren't already
-installed::
-
- setup(
- ...
- dependency_links = [
- "http://peak.telecommunity.com/snapshots/"
- ],
- )
-
-
-.. _Declaring Extras:
-
-
-Declaring "Extras" (optional features with their own dependencies)
-------------------------------------------------------------------
-
-Sometimes a project has "recommended" dependencies, that are not required for
-all uses of the project. For example, a project might offer optional PDF
-output if ReportLab is installed, and reStructuredText support if docutils is
-installed. These optional features are called "extras", and setuptools allows
-you to define their requirements as well. In this way, other projects that
-require these optional features can force the additional requirements to be
-installed, by naming the desired extras in their ``install_requires``.
-
-For example, let's say that Project A offers optional PDF and reST support::
-
- setup(
- name="Project-A",
- ...
- extras_require = {
- 'PDF': ["ReportLab>=1.2", "RXP"],
- 'reST': ["docutils>=0.3"],
- }
- )
-
-As you can see, the ``extras_require`` argument takes a dictionary mapping
-names of "extra" features, to strings or lists of strings describing those
-features' requirements. These requirements will *not* be automatically
-installed unless another package depends on them (directly or indirectly) by
-including the desired "extras" in square brackets after the associated project
-name. (Or if the extras were listed in a requirement spec on the EasyInstall
-command line.)
-
-Extras can be used by a project's `entry points`_ to specify dynamic
-dependencies. For example, if Project A includes a "rst2pdf" script, it might
-declare it like this, so that the "PDF" requirements are only resolved if the
-"rst2pdf" script is run::
-
- setup(
- name="Project-A",
- ...
- entry_points = {
- 'console_scripts': [
- 'rst2pdf = project_a.tools.pdfgen [PDF]',
- 'rst2html = project_a.tools.htmlgen',
- # more script entry points ...
- ],
- }
- )
-
-Projects can also use another project's extras when specifying dependencies.
-For example, if project B needs "project A" with PDF support installed, it
-might declare the dependency like this::
-
- setup(
- name="Project-B",
- install_requires = ["Project-A[PDF]"],
- ...
- )
-
-This will cause ReportLab to be installed along with project A, if project B is
-installed -- even if project A was already installed. In this way, a project
-can encapsulate groups of optional "downstream dependencies" under a feature
-name, so that packages that depend on it don't have to know what the downstream
-dependencies are. If a later version of Project A builds in PDF support and
-no longer needs ReportLab, or if it ends up needing other dependencies besides
-ReportLab in order to provide PDF support, Project B's setup information does
-not need to change, but the right packages will still be installed if needed.
-
-Note, by the way, that if a project ends up not needing any other packages to
-support a feature, it should keep an empty requirements list for that feature
-in its ``extras_require`` argument, so that packages depending on that feature
-don't break (due to an invalid feature name). For example, if Project A above
-builds in PDF support and no longer needs ReportLab, it could change its
-setup to this::
-
- setup(
- name="Project-A",
- ...
- extras_require = {
- 'PDF': [],
- 'reST': ["docutils>=0.3"],
- }
- )
-
-so that Package B doesn't have to remove the ``[PDF]`` from its requirement
-specifier.
-
-
-Including Data Files
-====================
-
-The distutils have traditionally allowed installation of "data files", which
-are placed in a platform-specific location. However, the most common use case
-for data files distributed with a package is for use *by* the package, usually
-by including the data files in the package directory.
-
-Setuptools offers three ways to specify data files to be included in your
-packages. First, you can simply use the ``include_package_data`` keyword,
-e.g.::
-
- from setuptools import setup, find_packages
- setup(
- ...
- include_package_data = True
- )
-
-This tells setuptools to install any data files it finds in your packages.
-The data files must be specified via the distutils' ``MANIFEST.in`` file.
-(They can also be tracked by a revision control system, using an appropriate
-plugin. See the section below on `Adding Support for Revision Control
-Systems`_ for information on how to write such plugins.)
-
-If you want finer-grained control over what files are included (for example,
-if you have documentation files in your package directories and want to exclude
-them from installation), then you can also use the ``package_data`` keyword,
-e.g.::
-
- from setuptools import setup, find_packages
- setup(
- ...
- package_data = {
- # If any package contains *.txt or *.rst files, include them:
- '': ['*.txt', '*.rst'],
- # And include any *.msg files found in the 'hello' package, too:
- 'hello': ['*.msg'],
- }
- )
-
-The ``package_data`` argument is a dictionary that maps from package names to
-lists of glob patterns. The globs may include subdirectory names, if the data
-files are contained in a subdirectory of the package. For example, if the
-package tree looks like this::
-
- setup.py
- src/
- mypkg/
- __init__.py
- mypkg.txt
- data/
- somefile.dat
- otherdata.dat
-
-The setuptools setup file might look like this::
-
- from setuptools import setup, find_packages
- setup(
- ...
- packages = find_packages('src'), # include all packages under src
- package_dir = {'':'src'}, # tell distutils packages are under src
-
- package_data = {
- # If any package contains *.txt files, include them:
- '': ['*.txt'],
- # And include any *.dat files found in the 'data' subdirectory
- # of the 'mypkg' package, also:
- 'mypkg': ['data/*.dat'],
- }
- )
-
-Notice that if you list patterns in ``package_data`` under the empty string,
-these patterns are used to find files in every package, even ones that also
-have their own patterns listed. Thus, in the above example, the ``mypkg.txt``
-file gets included even though it's not listed in the patterns for ``mypkg``.
-
-Also notice that if you use paths, you *must* use a forward slash (``/``) as
-the path separator, even if you are on Windows. Setuptools automatically
-converts slashes to appropriate platform-specific separators at build time.
-
-(Note: although the ``package_data`` argument was previously only available in
-``setuptools``, it was also added to the Python ``distutils`` package as of
-Python 2.4; there is `some documentation for the feature`__ available on the
-python.org website. If using the setuptools-specific ``include_package_data``
-argument, files specified by ``package_data`` will *not* be automatically
-added to the manifest unless they are listed in the MANIFEST.in file.)
-
-__ http://docs.python.org/dist/node11.html
-
-Sometimes, the ``include_package_data`` or ``package_data`` options alone
-aren't sufficient to precisely define what files you want included. For
-example, you may want to include package README files in your revision control
-system and source distributions, but exclude them from being installed. So,
-setuptools offers an ``exclude_package_data`` option as well, that allows you
-to do things like this::
-
- from setuptools import setup, find_packages
- setup(
- ...
- packages = find_packages('src'), # include all packages under src
- package_dir = {'':'src'}, # tell distutils packages are under src
-
- include_package_data = True, # include everything in source control
-
- # ...but exclude README.txt from all packages
- exclude_package_data = { '': ['README.txt'] },
- )
-
-The ``exclude_package_data`` option is a dictionary mapping package names to
-lists of wildcard patterns, just like the ``package_data`` option. And, just
-as with that option, a key of ``''`` will apply the given pattern(s) to all
-packages. However, any files that match these patterns will be *excluded*
-from installation, even if they were listed in ``package_data`` or were
-included as a result of using ``include_package_data``.
-
-In summary, the three options allow you to:
-
-``include_package_data``
- Accept all data files and directories matched by ``MANIFEST.in``.
-
-``package_data``
- Specify additional patterns to match files and directories that may or may
- not be matched by ``MANIFEST.in`` or found in source control.
-
-``exclude_package_data``
- Specify patterns for data files and directories that should *not* be
- included when a package is installed, even if they would otherwise have
- been included due to the use of the preceding options.
-
-NOTE: Due to the way the distutils build process works, a data file that you
-include in your project and then stop including may be "orphaned" in your
-project's build directories, requiring you to run ``setup.py clean --all`` to
-fully remove them. This may also be important for your users and contributors
-if they track intermediate revisions of your project using Subversion; be sure
-to let them know when you make changes that remove files from inclusion so they
-can run ``setup.py clean --all``.
-
-
-Accessing Data Files at Runtime
--------------------------------
-
-Typically, existing programs manipulate a package's ``__file__`` attribute in
-order to find the location of data files. However, this manipulation isn't
-compatible with PEP 302-based import hooks, including importing from zip files
-and Python Eggs. It is strongly recommended that, if you are using data files,
-you should use the `Resource Management API`_ of ``pkg_resources`` to access
-them. The ``pkg_resources`` module is distributed as part of setuptools, so if
-you're using setuptools to distribute your package, there is no reason not to
-use its resource management API. See also `Accessing Package Resources`_ for
-a quick example of converting code that uses ``__file__`` to use
-``pkg_resources`` instead.
-
-.. _Resource Management API: http://peak.telecommunity.com/DevCenter/PythonEggs#resource-management
-.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
-
-
-Non-Package Data Files
-----------------------
-
-The ``distutils`` normally install general "data files" to a platform-specific
-location (e.g. ``/usr/share``). This feature is intended to be used for things
-like documentation, example configuration files, and the like. ``setuptools``
-does not install these data files in a separate location, however. They are
-bundled inside the egg file or directory, alongside the Python modules and
-packages. The data files can also be accessed using the `Resource Management
-API`_, by specifying a ``Requirement`` instead of a package name::
-
- from pkg_resources import Requirement, resource_filename
- filename = resource_filename(Requirement.parse("MyProject"),"sample.conf")
-
-The above code will obtain the filename of the "sample.conf" file in the data
-root of the "MyProject" distribution.
-
-Note, by the way, that this encapsulation of data files means that you can't
-actually install data files to some arbitrary location on a user's machine;
-this is a feature, not a bug. You can always include a script in your
-distribution that extracts and copies your documentation or data files to
-a user-specified location, at their discretion. If you put related data files
-in a single directory, you can use ``resource_filename()`` with the directory
-name to get a filesystem directory that then can be copied with the ``shutil``
-module. (Even if your package is installed as a zipfile, calling
-``resource_filename()`` on a directory will return an actual filesystem
-directory, whose contents will be that entire subtree of your distribution.)
-
-(Of course, if you're writing a new package, you can just as easily place your
-data files or directories inside one of your packages, rather than using the
-distutils' approach. However, if you're updating an existing application, it
-may be simpler not to change the way it currently specifies these data files.)
-
-
-Automatic Resource Extraction
------------------------------
-
-If you are using tools that expect your resources to be "real" files, or your
-project includes non-extension native libraries or other files that your C
-extensions expect to be able to access, you may need to list those files in
-the ``eager_resources`` argument to ``setup()``, so that the files will be
-extracted together, whenever a C extension in the project is imported.
-
-This is especially important if your project includes shared libraries *other*
-than distutils-built C extensions, and those shared libraries use file
-extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
-extensions that setuptools 0.6a8 and higher automatically detects as shared
-libraries and adds to the ``native_libs.txt`` file for you. Any shared
-libraries whose names do not end with one of those extensions should be listed
-as ``eager_resources``, because they need to be present in the filesystem when
-the C extensions that link to them are used.
-
-The ``pkg_resources`` runtime for compressed packages will automatically
-extract *all* C extensions and ``eager_resources`` at the same time, whenever
-*any* C extension or eager resource is requested via the ``resource_filename()``
-API. (C extensions are imported using ``resource_filename()`` internally.)
-This ensures that C extensions will see all of the "real" files that they
-expect to see.
-
-Note also that you can list directory resource names in ``eager_resources`` as
-well, in which case the directory's contents (including subdirectories) will be
-extracted whenever any C extension or eager resource is requested.
-
-Please note that if you're not sure whether you need to use this argument, you
-don't! It's really intended to support projects with lots of non-Python
-dependencies and as a last resort for crufty projects that can't otherwise
-handle being compressed. If your package is pure Python, Python plus data
-files, or Python plus C, you really don't need this. You've got to be using
-either C or an external program that needs "real" files in your project before
-there's any possibility of ``eager_resources`` being relevant to your project.
-
-
-Extensible Applications and Frameworks
-======================================
-
-
-.. _Entry Points:
-
-Dynamic Discovery of Services and Plugins
------------------------------------------
-
-``setuptools`` supports creating libraries that "plug in" to extensible
-applications and frameworks, by letting you register "entry points" in your
-project that can be imported by the application or framework.
-
-For example, suppose that a blogging tool wants to support plugins
-that provide translation for various file types to the blog's output format.
-The framework might define an "entry point group" called ``blogtool.parsers``,
-and then allow plugins to register entry points for the file extensions they
-support.
-
-This would allow people to create distributions that contain one or more
-parsers for different file types, and then the blogging tool would be able to
-find the parsers at runtime by looking up an entry point for the file
-extension (or mime type, or however it wants to).
-
-Note that if the blogging tool includes parsers for certain file formats, it
-can register these as entry points in its own setup script, which means it
-doesn't have to special-case its built-in formats. They can just be treated
-the same as any other plugin's entry points would be.
-
-If you're creating a project that plugs in to an existing application or
-framework, you'll need to know what entry points or entry point groups are
-defined by that application or framework. Then, you can register entry points
-in your setup script. Here are a few examples of ways you might register an
-``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group,
-for our hypothetical blogging tool::
-
- setup(
- # ...
- entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'}
- )
-
- setup(
- # ...
- entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']}
- )
-
- setup(
- # ...
- entry_points = """
- [blogtool.parsers]
- .rst = some.nested.module:SomeClass.some_classmethod [reST]
- """,
- extras_require = dict(reST = "Docutils>=0.3.5")
- )
-
-The ``entry_points`` argument to ``setup()`` accepts either a string with
-``.ini``-style sections, or a dictionary mapping entry point group names to
-either strings or lists of strings containing entry point specifiers. An
-entry point specifier consists of a name and value, separated by an ``=``
-sign. The value consists of a dotted module name, optionally followed by a
-``:`` and a dotted identifier naming an object within the module. It can
-also include a bracketed list of "extras" that are required for the entry
-point to be used. When the invoking application or framework requests loading
-of an entry point, any requirements implied by the associated extras will be
-passed to ``pkg_resources.require()``, so that an appropriate error message
-can be displayed if the needed package(s) are missing. (Of course, the
-invoking app or framework can ignore such errors if it wants to make an entry
-point optional if a requirement isn't installed.)
-
-
-Defining Additional Metadata
-----------------------------
-
-Some extensible applications and frameworks may need to define their own kinds
-of metadata to include in eggs, which they can then access using the
-``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin
-developers include additional files in their ``ProjectName.egg-info``
-directory. However, since it can be tedious to create such files by hand, you
-may want to create a distutils extension that will create the necessary files
-from arguments to ``setup()``, in much the same way that ``setuptools`` does
-for many of the ``setup()`` arguments it adds. See the section below on
-`Creating distutils Extensions`_ for more details, especially the subsection on
-`Adding new EGG-INFO Files`_.
-
-
-"Development Mode"
-==================
-
-Under normal circumstances, the ``distutils`` assume that you are going to
-build a distribution of your project, not use it in its "raw" or "unbuilt"
-form. If you were to use the ``distutils`` that way, you would have to rebuild
-and reinstall your project every time you made a change to it during
-development.
-
-Another problem that sometimes comes up with the ``distutils`` is that you may
-need to do development on two related projects at the same time. You may need
-to put both projects' packages in the same directory to run them, but need to
-keep them separate for revision control purposes. How can you do this?
-
-Setuptools allows you to deploy your projects for use in a common directory or
-staging area, but without copying any files. Thus, you can edit each project's
-code in its checkout directory, and only need to run build commands when you
-change a project's C extensions or similarly compiled files. You can even
-deploy a project into another project's checkout directory, if that's your
-preferred way of working (as opposed to using a common independent staging area
-or the site-packages directory).
-
-To do this, use the ``setup.py develop`` command. It works very similarly to
-``setup.py install`` or the EasyInstall tool, except that it doesn't actually
-install anything. Instead, it creates a special ``.egg-link`` file in the
-deployment directory, that links to your project's source code. And, if your
-deployment directory is Python's ``site-packages`` directory, it will also
-update the ``easy-install.pth`` file to include your project's source code,
-thereby making it available on ``sys.path`` for all programs using that Python
-installation.
-
-If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link``
-will not link directly to your source code when run under Python 3, since
-that source code would be made for Python 2 and not work under Python 3.
-Instead the ``setup.py develop`` will build Python 3 code under the ``build``
-directory, and link there. This means that after doing code changes you will
-have to run ``setup.py build`` before these changes are picked up by your
-Python 3 installation.
-
-In addition, the ``develop`` command creates wrapper scripts in the target
-script directory that will run your in-development scripts after ensuring that
-all your ``install_requires`` packages are available on ``sys.path``.
-
-You can deploy the same project to multiple staging areas, e.g. if you have
-multiple projects on the same machine that are sharing the same project you're
-doing development work on.
-
-When you're done with a given development task, you can remove the project
-source from a staging area using ``setup.py develop --uninstall``, specifying
-the desired staging area if it's not the default.
-
-There are several options to control the precise behavior of the ``develop``
-command; see the section on the `develop`_ command below for more details.
-
-Note that you can also apply setuptools commands to non-setuptools projects,
-using commands like this::
-
- python -c "import setuptools; execfile('setup.py')" develop
-
-That is, you can simply list the normal setup commands and options following
-the quoted part.
-
-
-Distributing a ``setuptools``-based project
-===========================================
-
-Using ``setuptools``... Without bundling it!
----------------------------------------------
-
-Your users might not have ``setuptools`` installed on their machines, or even
-if they do, it might not be the right version. Fixing this is easy; just
-download `ez_setup.py`_, and put it in the same directory as your ``setup.py``
-script. (Be sure to add it to your revision control system, too.) Then add
-these two lines to the very top of your setup script, before the script imports
-anything from setuptools:
-
-.. code-block:: python
-
- import ez_setup
- ez_setup.use_setuptools()
-
-That's it. The ``ez_setup`` module will automatically download a matching
-version of ``setuptools`` from PyPI, if it isn't present on the target system.
-Whenever you install an updated version of setuptools, you should also update
-your projects' ``ez_setup.py`` files, so that a matching version gets installed
-on the target machine(s).
-
-By the way, setuptools supports the new PyPI "upload" command, so you can use
-``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your
-source or egg distributions respectively. Your project's current version must
-be registered with PyPI first, of course; you can use ``setup.py register`` to
-do that. Or you can do it all in one step, e.g. ``setup.py register sdist
-bdist_egg upload`` will register the package, build source and egg
-distributions, and then upload them both to PyPI, where they'll be easily
-found by other projects that depend on them.
-
-(By the way, if you need to distribute a specific version of ``setuptools``,
-you can specify the exact version and base download URL as parameters to the
-``use_setuptools()`` function. See the function's docstring for details.)
-
-
-What Your Users Should Know
----------------------------
-
-In general, a setuptools-based project looks just like any distutils-based
-project -- as long as your users have an internet connection and are installing
-to ``site-packages``, that is. But for some users, these conditions don't
-apply, and they may become frustrated if this is their first encounter with
-a setuptools-based project. To keep these users happy, you should review the
-following topics in your project's installation instructions, if they are
-relevant to your project and your target audience isn't already familiar with
-setuptools and ``easy_install``.
-
-Network Access
- If your project is using ``ez_setup``, you should inform users of the
- need to either have network access, or to preinstall the correct version of
- setuptools using the `EasyInstall installation instructions`_. Those
- instructions also have tips for dealing with firewalls as well as how to
- manually download and install setuptools.
-
-Custom Installation Locations
- You should inform your users that if they are installing your project to
- somewhere other than the main ``site-packages`` directory, they should
- first install setuptools using the instructions for `Custom Installation
- Locations`_, before installing your project.
-
-Your Project's Dependencies
- If your project depends on other projects that may need to be downloaded
- from PyPI or elsewhere, you should list them in your installation
- instructions, or tell users how to find out what they are. While most
- users will not need this information, any users who don't have unrestricted
- internet access may have to find, download, and install the other projects
- manually. (Note, however, that they must still install those projects
- using ``easy_install``, or your project will not know they are installed,
- and your setup script will try to download them again.)
-
- If you want to be especially friendly to users with limited network access,
- you may wish to build eggs for your project and its dependencies, making
- them all available for download from your site, or at least create a page
- with links to all of the needed eggs. In this way, users with limited
- network access can manually download all the eggs to a single directory,
- then use the ``-f`` option of ``easy_install`` to specify the directory
- to find eggs in. Users who have full network access can just use ``-f``
- with the URL of your download page, and ``easy_install`` will find all the
- needed eggs using your links directly. This is also useful when your
- target audience isn't able to compile packages (e.g. most Windows users)
- and your package or some of its dependencies include C code.
-
-Revision Control System Users and Co-Developers
- Users and co-developers who are tracking your in-development code using
- a revision control system should probably read this manual's sections
- regarding such development. Alternately, you may wish to create a
- quick-reference guide containing the tips from this manual that apply to
- your particular situation. For example, if you recommend that people use
- ``setup.py develop`` when tracking your in-development code, you should let
- them know that this needs to be run after every update or commit.
-
- Similarly, if you remove modules or data files from your project, you
- should remind them to run ``setup.py clean --all`` and delete any obsolete
- ``.pyc`` or ``.pyo``. (This tip applies to the distutils in general, not
- just setuptools, but not everybody knows about them; be kind to your users
- by spelling out your project's best practices rather than leaving them
- guessing.)
-
-Creating System Packages
- Some users want to manage all Python packages using a single package
- manager, and sometimes that package manager isn't ``easy_install``!
- Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and
- ``bdist_dumb`` formats for system packaging. If a user has a locally-
- installed "bdist" packaging tool that internally uses the distutils
- ``install`` command, it should be able to work with ``setuptools``. Some
- examples of "bdist" formats that this should work with include the
- ``bdist_nsi`` and ``bdist_msi`` formats for Windows.
-
- However, packaging tools that build binary distributions by running
- ``setup.py install`` on the command line or as a subprocess will require
- modification to work with setuptools. They should use the
- ``--single-version-externally-managed`` option to the ``install`` command,
- combined with the standard ``--root`` or ``--record`` options.
- See the `install command`_ documentation below for more details. The
- ``bdist_deb`` command is an example of a command that currently requires
- this kind of patching to work with setuptools.
-
- If you or your users have a problem building a usable system package for
- your project, please report the problem via the mailing list so that
- either the "bdist" tool in question or setuptools can be modified to
- resolve the issue.
-
-
-Setting the ``zip_safe`` flag
------------------------------
-
-For some use cases (such as bundling as part of a larger application), Python
-packages may be run directly from a zip file.
-Not all packages, however, are capable of running in compressed form, because
-they may expect to be able to access either source code or data files as
-normal operating system files. So, ``setuptools`` can install your project
-as a zipfile or a directory, and its default choice is determined by the
-project's ``zip_safe`` flag.
-
-You can pass a True or False value for the ``zip_safe`` argument to the
-``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg``
-command will analyze your project's contents to see if it can detect any
-conditions that would prevent it from working in a zipfile. It will output
-notices to the console about any such conditions that it finds.
-
-Currently, this analysis is extremely conservative: it will consider the
-project unsafe if it contains any C extensions or datafiles whatsoever. This
-does *not* mean that the project can't or won't work as a zipfile! It just
-means that the ``bdist_egg`` authors aren't yet comfortable asserting that
-the project *will* work. If the project contains no C or data files, and does
-no ``__file__`` or ``__path__`` introspection or source code manipulation, then
-there is an extremely solid chance the project will work when installed as a
-zipfile. (And if the project uses ``pkg_resources`` for all its data file
-access, then C extensions and other data files shouldn't be a problem at all.
-See the `Accessing Data Files at Runtime`_ section above for more information.)
-
-However, if ``bdist_egg`` can't be *sure* that your package will work, but
-you've checked over all the warnings it issued, and you are either satisfied it
-*will* work (or if you want to try it for yourself), then you should set
-``zip_safe`` to ``True`` in your ``setup()`` call. If it turns out that it
-doesn't work, you can always change it to ``False``, which will force
-``setuptools`` to install your project as a directory rather than as a zipfile.
-
-Of course, the end-user can still override either decision, if they are using
-EasyInstall to install your package. And, if you want to override for testing
-purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py
-easy_install --always-unzip .`` in your project directory to install the
-package as a zipfile or directory, respectively.
-
-In the future, as we gain more experience with different packages and become
-more satisfied with the robustness of the ``pkg_resources`` runtime, the
-"zip safety" analysis may become less conservative. However, we strongly
-recommend that you determine for yourself whether your project functions
-correctly when installed as a zipfile, correct any problems if you can, and
-then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
-flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to
-try to guess whether your project can work as a zipfile.
-
-
-Namespace Packages
-------------------
-
-Sometimes, a large package is more useful if distributed as a collection of
-smaller eggs. However, Python does not normally allow the contents of a
-package to be retrieved from more than one location. "Namespace packages"
-are a solution for this problem. When you declare a package to be a namespace
-package, it means that the package has no meaningful contents in its
-``__init__.py``, and that it is merely a container for modules and subpackages.
-
-The ``pkg_resources`` runtime will then automatically ensure that the contents
-of namespace packages that are spread over multiple eggs or directories are
-combined into a single "virtual" package.
-
-The ``namespace_packages`` argument to ``setup()`` lets you declare your
-project's namespace packages, so that they will be included in your project's
-metadata. The argument should list the namespace packages that the egg
-participates in. For example, the ZopeInterface project might do this::
-
- setup(
- # ...
- namespace_packages = ['zope']
- )
-
-because it contains a ``zope.interface`` package that lives in the ``zope``
-namespace package. Similarly, a project for a standalone ``zope.publisher``
-would also declare the ``zope`` namespace package. When these projects are
-installed and used, Python will see them both as part of a "virtual" ``zope``
-package, even though they will be installed in different locations.
-
-Namespace packages don't have to be top-level packages. For example, Zope 3's
-``zope.app`` package is a namespace package, and in the future PEAK's
-``peak.util`` package will be too.
-
-Note, by the way, that your project's source tree must include the namespace
-packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
-packages), in a normal Python package layout. These ``__init__.py`` files
-*must* contain the line::
-
- __import__('pkg_resources').declare_namespace(__name__)
-
-This code ensures that the namespace package machinery is operating and that
-the current package is registered as a namespace package.
-
-You must NOT include any other code and data in a namespace package's
-``__init__.py``. Even though it may appear to work during development, or when
-projects are installed as ``.egg`` files, it will not work when the projects
-are installed using "system" packaging tools -- in such cases the
-``__init__.py`` files will not be installed, let alone executed.
-
-You must include the ``declare_namespace()`` line in the ``__init__.py`` of
-*every* project that has contents for the namespace package in question, in
-order to ensure that the namespace will be declared regardless of which
-project's copy of ``__init__.py`` is loaded first. If the first loaded
-``__init__.py`` doesn't declare it, it will never *be* declared, because no
-other copies will ever be loaded!
-
-
-TRANSITIONAL NOTE
-~~~~~~~~~~~~~~~~~
-
-Setuptools automatically calls ``declare_namespace()`` for you at runtime,
-but future versions may *not*. This is because the automatic declaration
-feature has some negative side effects, such as needing to import all namespace
-packages during the initialization of the ``pkg_resources`` runtime, and also
-the need for ``pkg_resources`` to be explicitly imported before any namespace
-packages work at all. In some future releases, you'll be responsible
-for including your own declaration lines, and the automatic declaration feature
-will be dropped to get rid of the negative side effects.
-
-During the remainder of the current development cycle, therefore, setuptools
-will warn you about missing ``declare_namespace()`` calls in your
-``__init__.py`` files, and you should correct these as soon as possible
-before the compatibility support is removed.
-Namespace packages without declaration lines will not work
-correctly once a user has upgraded to a later version, so it's important that
-you make this change now in order to avoid having your code break in the field.
-Our apologies for the inconvenience, and thank you for your patience.
-
-
-
-Tagging and "Daily Build" or "Snapshot" Releases
-------------------------------------------------
-
-When a set of related projects are under development, it may be important to
-track finer-grained version increments than you would normally use for e.g.
-"stable" releases. While stable releases might be measured in dotted numbers
-with alpha/beta/etc. status codes, development versions of a project often
-need to be tracked by revision or build number or even build date. This is
-especially true when projects in development need to refer to one another, and
-therefore may literally need an up-to-the-minute version of something!
-
-To support these scenarios, ``setuptools`` allows you to "tag" your source and
-egg distributions by adding one or more of the following to the project's
-"official" version identifier:
-
-* A manually-specified pre-release tag, such as "build" or "dev", or a
- manually-specified post-release tag, such as a build or revision number
- (``--tag-build=STRING, -bSTRING``)
-
-* A "last-modified revision number" string generated automatically from
- Subversion's metadata (assuming your project is being built from a Subversion
- "working copy") (``--tag-svn-revision, -r``)
-
-* An 8-character representation of the build date (``--tag-date, -d``), as
- a postrelease tag
-
-You can add these tags by adding ``egg_info`` and the desired options to
-the command line ahead of the ``sdist`` or ``bdist`` commands that you want
-to generate a daily build or snapshot for. See the section below on the
-`egg_info`_ command for more details.
-
-(Also, before you release your project, be sure to see the section above on
-`Specifying Your Project's Version`_ for more information about how pre- and
-post-release tags affect how setuptools and EasyInstall interpret version
-numbers. This is important in order to make sure that dependency processing
-tools will know which versions of your project are newer than others.)
-
-Finally, if you are creating builds frequently, and either building them in a
-downloadable location or are copying them to a distribution server, you should
-probably also check out the `rotate`_ command, which lets you automatically
-delete all but the N most-recently-modified distributions matching a glob
-pattern. So, you can use a command line like::
-
- setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
-
-to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
-most recent Subversion revision that affected the source tree), and then
-delete any egg files from the distribution directory except for the three
-that were built most recently.
-
-If you have to manage automated builds for multiple packages, each with
-different tagging and rotation policies, you may also want to check out the
-`alias`_ command, which would let each package define an alias like ``daily``
-that would perform the necessary tag, build, and rotate commands. Then, a
-simpler script or cron job could just run ``setup.py daily`` in each project
-directory. (And, you could also define sitewide or per-user default versions
-of the ``daily`` alias, so that projects that didn't define their own would
-use the appropriate defaults.)
-
-
-Generating Source Distributions
--------------------------------
-
-``setuptools`` enhances the distutils' default algorithm for source file
-selection with pluggable endpoints for looking up files to include. If you are
-using a revision control system, and your source distributions only need to
-include files that you're tracking in revision control, use a corresponding
-plugin instead of writing a ``MANIFEST.in`` file. See the section below on
-`Adding Support for Revision Control Systems`_ for information on plugins.
-
-If you need to include automatically generated files, or files that are kept in
-an unsupported revision control system, you'll need to create a ``MANIFEST.in``
-file to specify any files that the default file location algorithm doesn't
-catch. See the distutils documentation for more information on the format of
-the ``MANIFEST.in`` file.
-
-But, be sure to ignore any part of the distutils documentation that deals with
-``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
-from these issues and doesn't work the same way in any case. Unlike the
-distutils, setuptools regenerates the source distribution manifest file
-every time you build a source distribution, and it builds it inside the
-project's ``.egg-info`` directory, out of the way of your main project
-directory. You therefore need not worry about whether it is up-to-date or not.
-
-Indeed, because setuptools' approach to determining the contents of a source
-distribution is so much simpler, its ``sdist`` command omits nearly all of
-the options that the distutils' more complex ``sdist`` process requires. For
-all practical purposes, you'll probably use only the ``--formats`` option, if
-you use any option at all.
-
-
-Making your package available for EasyInstall
----------------------------------------------
-
-If you use the ``register`` command (``setup.py register``) to register your
-package with PyPI, that's most of the battle right there. (See the
-`docs for the register command`_ for more details.)
-
-.. _docs for the register command: http://docs.python.org/dist/package-index.html
-
-If you also use the `upload`_ command to upload actual distributions of your
-package, that's even better, because EasyInstall will be able to find and
-download them directly from your project's PyPI page.
-
-However, there may be reasons why you don't want to upload distributions to
-PyPI, and just want your existing distributions (or perhaps a Subversion
-checkout) to be used instead.
-
-So here's what you need to do before running the ``register`` command. There
-are three ``setup()`` arguments that affect EasyInstall:
-
-``url`` and ``download_url``
- These become links on your project's PyPI page. EasyInstall will examine
- them to see if they link to a package ("primary links"), or whether they are
- HTML pages. If they're HTML pages, EasyInstall scans all HREF's on the
- page for primary links.
-
-``long_description``
- EasyInstall will check any URLs contained in this argument to see if they
- are primary links.
-
-A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip,
-.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or
-``#egg=project-version`` fragment identifier attached to it. EasyInstall
-attempts to determine a project name and optional version number from the text
-of a primary link *without* downloading it. When it has found all the primary
-links, EasyInstall will select the best match based on requested version,
-platform compatibility, and other criteria.
-
-So, if your ``url`` or ``download_url`` point either directly to a downloadable
-source distribution, or to HTML page(s) that have direct links to such, then
-EasyInstall will be able to locate downloads automatically. If you want to
-make Subversion checkouts available, then you should create links with either
-``#egg=project`` or ``#egg=project-version`` added to the URL. You should
-replace ``project`` and ``version`` with the values they would have in an egg
-filename. (Be sure to actually generate an egg and then use the initial part
-of the filename, rather than trying to guess what the escaped form of the
-project name and version number will be.)
-
-Note that Subversion checkout links are of lower precedence than other kinds
-of distributions, so EasyInstall will not select a Subversion checkout for
-downloading unless it has a version included in the ``#egg=`` suffix, and
-it's a higher version than EasyInstall has seen in any other links for your
-project.
-
-As a result, it's a common practice to mark checkout URLs with a version of
-"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like
-this::
-
- easy_install --editable projectname==dev
-
-in order to check out the in-development version of ``projectname``.
-
-
-Managing "Continuous Releases" Using Subversion
------------------------------------------------
-
-If you expect your users to track in-development versions of your project via
-Subversion, there are a few additional steps you should take to ensure that
-things work smoothly with EasyInstall. First, you should add the following
-to your project's ``setup.cfg`` file:
-
-.. code-block:: ini
-
- [egg_info]
- tag_build = .dev
- tag_svn_revision = 1
-
-This will tell ``setuptools`` to generate package version numbers like
-``1.0a1.dev-r1263``, which will be considered to be an *older* release than
-``1.0a1``. Thus, when you actually release ``1.0a1``, the entire egg
-infrastructure (including ``setuptools``, ``pkg_resources`` and EasyInstall)
-will know that ``1.0a1`` supersedes any interim snapshots from Subversion, and
-handle upgrades accordingly.
-
-(Note: the project version number you specify in ``setup.py`` should always be
-the *next* version of your software, not the last released version.
-Alternately, you can leave out the ``tag_build=.dev``, and always use the
-*last* release as a version number, so that your post-1.0 builds are labelled
-``1.0-r1263``, indicating a post-1.0 patchlevel. Most projects so far,
-however, seem to prefer to think of their project as being a future version
-still under development, rather than a past version being patched. It is of
-course possible for a single project to have both situations, using
-post-release numbering on release branches, and pre-release numbering on the
-trunk. But you don't have to make things this complex if you don't want to.)
-
-Commonly, projects releasing code from Subversion will include a PyPI link to
-their checkout URL (as described in the previous section) with an
-``#egg=projectname-dev`` suffix. This allows users to request EasyInstall
-to download ``projectname==dev`` in order to get the latest in-development
-code. Note that if your project depends on such in-progress code, you may wish
-to specify your ``install_requires`` (or other requirements) to include
-``==dev``, e.g.:
-
-.. code-block:: python
-
- install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"]
-
-The above example says, "I really want at least this particular development
-revision number, but feel free to follow and use an ``#egg=OtherProject-dev``
-link if you find one". This avoids the need to have actual source or binary
-distribution snapshots of in-development code available, just to be able to
-depend on the latest and greatest a project has to offer.
-
-A final note for Subversion development: if you are using SVN revision tags
-as described in this section, it's a good idea to run ``setup.py develop``
-after each Subversion checkin or update, because your project's version number
-will be changing, and your script wrappers need to be updated accordingly.
-
-Also, if the project's requirements have changed, the ``develop`` command will
-take care of fetching the updated dependencies, building changed extensions,
-etc. Be sure to also remind any of your users who check out your project
-from Subversion that they need to run ``setup.py develop`` after every update
-in order to keep their checkout completely in sync.
-
-
-Making "Official" (Non-Snapshot) Releases
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When you make an official release, creating source or binary distributions,
-you will need to override the tag settings from ``setup.cfg``, so that you
-don't end up registering versions like ``foobar-0.7a1.dev-r34832``. This is
-easy to do if you are developing on the trunk and using tags or branches for
-your releases - just make the change to ``setup.cfg`` after branching or
-tagging the release, so the trunk will still produce development snapshots.
-
-Alternately, if you are not branching for releases, you can override the
-default version options on the command line, using something like::
-
- python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-The first part of this command (``egg_info -RDb ""``) will override the
-configured tag information, before creating source and binary eggs, registering
-the project with PyPI, and uploading the files. Thus, these commands will use
-the plain version from your ``setup.py``, without adding the Subversion
-revision number or build designation string.
-
-Of course, if you will be doing this a lot, you may wish to create a personal
-alias for this operation, e.g.::
-
- python setup.py alias -u release egg_info -RDb ""
-
-You can then use it like this::
-
- python setup.py release sdist bdist_egg register upload
-
-Or of course you can create more elaborate aliases that do all of the above.
-See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
-
-
-
-Distributing Extensions compiled with Pyrex
--------------------------------------------
-
-``setuptools`` includes transparent support for building Pyrex extensions, as
-long as you define your extensions using ``setuptools.Extension``, *not*
-``distutils.Extension``. You must also not import anything from Pyrex in
-your setup script.
-
-If you follow these rules, you can safely list ``.pyx`` files as the source
-of your ``Extension`` objects in the setup script. ``setuptools`` will detect
-at build time whether Pyrex is installed or not. If it is, then ``setuptools``
-will use it. If not, then ``setuptools`` will silently change the
-``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx``
-files, so that the normal distutils C compilation process will occur.
-
-Of course, for this to work, your source distributions must include the C
-code generated by Pyrex, as well as your original ``.pyx`` files. This means
-that you will probably want to include current ``.c`` files in your revision
-control system, rebuilding them whenever you check changes in for the ``.pyx``
-source files. This will ensure that people tracking your project in a revision
-control system will be able to build it even if they don't have Pyrex
-installed, and that your source releases will be similarly usable with or
-without Pyrex.
-
-
------------------
-Command Reference
------------------
-
-.. _alias:
-
-``alias`` - Define shortcuts for commonly used commands
-=======================================================
-
-Sometimes, you need to use the same commands over and over, but you can't
-necessarily set them as defaults. For example, if you produce both development
-snapshot releases and "stable" releases of a project, you may want to put
-the distributions in different places, or use different ``egg_info`` tagging
-options, etc. In these cases, it doesn't make sense to set the options in
-a distutils configuration file, because the values of the options change based
-on what you're trying to do.
-
-Setuptools therefore allows you to define "aliases" - shortcut names for
-an arbitrary string of commands and options, using ``setup.py alias aliasname
-expansion``, where aliasname is the name of the new alias, and the remainder of
-the command line supplies its expansion. For example, this command defines
-a sitewide alias called "daily", that sets various ``egg_info`` tagging
-options::
-
- setup.py alias --global-config daily egg_info --tag-svn-revision \
- --tag-build=development
-
-Once the alias is defined, it can then be used with other setup commands,
-e.g.::
-
- setup.py daily bdist_egg # generate a daily-build .egg file
- setup.py daily sdist # generate a daily-build source distro
- setup.py daily sdist bdist_egg # generate both
-
-The above commands are interpreted as if the word ``daily`` were replaced with
-``egg_info --tag-svn-revision --tag-build=development``.
-
-Note that setuptools will expand each alias *at most once* in a given command
-line. This serves two purposes. First, if you accidentally create an alias
-loop, it will have no effect; you'll instead get an error message about an
-unknown command. Second, it allows you to define an alias for a command, that
-uses that command. For example, this (project-local) alias::
-
- setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
-
-redefines the ``bdist_egg`` command so that it always runs the ``rotate``
-command afterwards to delete all but the newest egg file. It doesn't loop
-indefinitely on ``bdist_egg`` because the alias is only expanded once when
-used.
-
-You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
-
- setup.py alias --global-config --remove daily
-
-would delete the "daily" alias we defined above.
-
-Aliases can be defined on a project-specific, per-user, or sitewide basis. The
-default is to define or remove a project-specific alias, but you can use any of
-the `configuration file options`_ (listed under the `saveopts`_ command, below)
-to determine which distutils configuration file an alias will be added to
-(or removed from).
-
-Note that if you omit the "expansion" argument to the ``alias`` command,
-you'll get output showing that alias' current definition (and what
-configuration file it's defined in). If you omit the alias name as well,
-you'll get a listing of all current aliases along with their configuration
-file locations.
-
-
-``bdist_egg`` - Create a Python Egg for the project
-===================================================
-
-This command generates a Python Egg (``.egg`` file) for the project. Python
-Eggs are the preferred binary distribution format for EasyInstall, because they
-are cross-platform (for "pure" packages), directly importable, and contain
-project metadata including scripts and information about the project's
-dependencies. They can be simply downloaded and added to ``sys.path``
-directly, or they can be placed in a directory on ``sys.path`` and then
-automatically discovered by the egg runtime system.
-
-This command runs the `egg_info`_ command (if it hasn't already run) to update
-the project's metadata (``.egg-info``) directory. If you have added any extra
-metadata files to the ``.egg-info`` directory, those files will be included in
-the new egg file's metadata directory, for use by the egg runtime system or by
-any applications or frameworks that use that metadata.
-
-You won't usually need to specify any special options for this command; just
-use ``bdist_egg`` and you're done. But there are a few options that may
-be occasionally useful:
-
-``--dist-dir=DIR, -d DIR``
- Set the directory where the ``.egg`` file will be placed. If you don't
- supply this, then the ``--dist-dir`` setting of the ``bdist`` command
- will be used, which is usually a directory named ``dist`` in the project
- directory.
-
-``--plat-name=PLATFORM, -p PLATFORM``
- Set the platform name string that will be embedded in the egg's filename
- (assuming the egg contains C extensions). This can be used to override
- the distutils default platform name with something more meaningful. Keep
- in mind, however, that the egg runtime system expects to see eggs with
- distutils platform names, so it may ignore or reject eggs with non-standard
- platform names. Similarly, the EasyInstall program may ignore them when
- searching web pages for download links. However, if you are
- cross-compiling or doing some other unusual things, you might find a use
- for this option.
-
-``--exclude-source-files``
- Don't include any modules' ``.py`` files in the egg, just compiled Python,
- C, and data files. (Note that this doesn't affect any ``.py`` files in the
- EGG-INFO directory or its subdirectories, since for example there may be
- scripts with a ``.py`` extension which must still be retained.) We don't
- recommend that you use this option except for packages that are being
- bundled for proprietary end-user applications, or for "embedded" scenarios
- where space is at an absolute premium. On the other hand, if your package
- is going to be installed and used in compressed form, you might as well
- exclude the source because Python's ``traceback`` module doesn't currently
- understand how to display zipped source code anyway, or how to deal with
- files that are in a different place from where their code was compiled.
-
-There are also some options you will probably never need, but which are there
-because they were copied from similar ``bdist`` commands used as an example for
-creating this one. They may be useful for testing and debugging, however,
-which is why we kept them:
-
-``--keep-temp, -k``
- Keep the contents of the ``--bdist-dir`` tree around after creating the
- ``.egg`` file.
-
-``--bdist-dir=DIR, -b DIR``
- Set the temporary directory for creating the distribution. The entire
- contents of this directory are zipped to create the ``.egg`` file, after
- running various installation commands to copy the package's modules, data,
- and extensions here.
-
-``--skip-build``
- Skip doing any "build" commands; just go straight to the
- install-and-compress phases.
-
-
-.. _develop:
-
-``develop`` - Deploy the project source in "Development Mode"
-=============================================================
-
-This command allows you to deploy your project's source for use in one or more
-"staging areas" where it will be available for importing. This deployment is
-done in such a way that changes to the project source are immediately available
-in the staging area(s), without needing to run a build or install step after
-each change.
-
-The ``develop`` command works by creating an ``.egg-link`` file (named for the
-project) in the given staging area. If the staging area is Python's
-``site-packages`` directory, it also updates an ``easy-install.pth`` file so
-that the project is on ``sys.path`` by default for all programs run using that
-Python installation.
-
-The ``develop`` command also installs wrapper scripts in the staging area (or
-a separate directory, as specified) that will ensure the project's dependencies
-are available on ``sys.path`` before running the project's source scripts.
-And, it ensures that any missing project dependencies are available in the
-staging area, by downloading and installing them if necessary.
-
-Last, but not least, the ``develop`` command invokes the ``build_ext -i``
-command to ensure any C extensions in the project have been built and are
-up-to-date, and the ``egg_info`` command to ensure the project's metadata is
-updated (so that the runtime and wrappers know what the project's dependencies
-are). If you make any changes to the project's setup script or C extensions,
-you should rerun the ``develop`` command against all relevant staging areas to
-keep the project's scripts, metadata and extensions up-to-date. Most other
-kinds of changes to your project should not require any build operations or
-rerunning ``develop``, but keep in mind that even minor changes to the setup
-script (e.g. changing an entry point definition) require you to re-run the
-``develop`` or ``test`` commands to keep the distribution updated.
-
-Here are some of the options that the ``develop`` command accepts. Note that
-they affect the project's dependencies as well as the project itself, so if you
-have dependencies that need to be installed and you use ``--exclude-scripts``
-(for example), the dependencies' scripts will not be installed either! For
-this reason, you may want to use EasyInstall to install the project's
-dependencies before using the ``develop`` command, if you need finer control
-over the installation options for dependencies.
-
-``--uninstall, -u``
- Un-deploy the current project. You may use the ``--install-dir`` or ``-d``
- option to designate the staging area. The created ``.egg-link`` file will
- be removed, if present and it is still pointing to the project directory.
- The project directory will be removed from ``easy-install.pth`` if the
- staging area is Python's ``site-packages`` directory.
-
- Note that this option currently does *not* uninstall script wrappers! You
- must uninstall them yourself, or overwrite them by using EasyInstall to
- activate a different version of the package. You can also avoid installing
- script wrappers in the first place, if you use the ``--exclude-scripts``
- (aka ``-x``) option when you run ``develop`` to deploy the project.
-
-``--multi-version, -m``
- "Multi-version" mode. Specifying this option prevents ``develop`` from
- adding an ``easy-install.pth`` entry for the project(s) being deployed, and
- if an entry for any version of a project already exists, the entry will be
- removed upon successful deployment. In multi-version mode, no specific
- version of the package is available for importing, unless you use
- ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
- a wrapper script generated by ``setuptools`` or EasyInstall. (In which
- case the wrapper script calls ``require()`` for you.)
-
- Note that if you install to a directory other than ``site-packages``,
- this option is automatically in effect, because ``.pth`` files can only be
- used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
- the ``--install-dir`` or ``-d`` option (or they are set via configuration
- file(s)) your project and its dependencies will be deployed in multi-
- version mode.
-
-``--install-dir=DIR, -d DIR``
- Set the installation directory (staging area). If this option is not
- directly specified on the command line or in a distutils configuration
- file, the distutils default installation location is used. Normally, this
- will be the ``site-packages`` directory, but if you are using distutils
- configuration files, setting things like ``prefix`` or ``install_lib``,
- then those settings are taken into account when computing the default
- staging area.
-
-``--script-dir=DIR, -s DIR``
- Set the script installation directory. If you don't supply this option
- (via the command line or a configuration file), but you *have* supplied
- an ``--install-dir`` (via command line or config file), then this option
- defaults to the same directory, so that the scripts will be able to find
- their associated package installation. Otherwise, this setting defaults
- to the location where the distutils would normally install scripts, taking
- any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
- Don't deploy script wrappers. This is useful if you don't want to disturb
- existing versions of the scripts in the staging area.
-
-``--always-copy, -a``
- Copy all needed distributions to the staging area, even if they
- are already present in another directory on ``sys.path``. By default, if
- a requirement can be met using a distribution that is already available in
- a directory on ``sys.path``, it will not be copied to the staging area.
-
-``--egg-path=DIR``
- Force the generated ``.egg-link`` file to use a specified relative path
- to the source directory. This can be useful in circumstances where your
- installation directory is being shared by code running under multiple
- platforms (e.g. Mac and Windows) which have different absolute locations
- for the code under development, but the same *relative* locations with
- respect to the installation directory. If you use this option when
- installing, you must supply the same relative path when uninstalling.
-
-In addition to the above options, the ``develop`` command also accepts all of
-the same options accepted by ``easy_install``. If you've configured any
-``easy_install`` settings in your ``setup.cfg`` (or other distutils config
-files), the ``develop`` command will use them as defaults, unless you override
-them in a ``[develop]`` section or on the command line.
-
-
-``easy_install`` - Find and install packages
-============================================
-
-This command runs the `EasyInstall tool
-<easy_install.html>`_ for you. It is exactly
-equivalent to running the ``easy_install`` command. All command line arguments
-following this command are consumed and not processed further by the distutils,
-so this must be the last command listed on the command line. Please see
-the EasyInstall documentation for the options reference and usage examples.
-Normally, there is no reason to use this command via the command line, as you
-can just use ``easy_install`` directly. It's only listed here so that you know
-it's a distutils command, which means that you can:
-
-* create command aliases that use it,
-* create distutils extensions that invoke it as a subcommand, and
-* configure options for it in your ``setup.cfg`` or other distutils config
- files.
-
-
-.. _egg_info:
-
-``egg_info`` - Create egg metadata and set build tags
-=====================================================
-
-This command performs two operations: it updates a project's ``.egg-info``
-metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
-commands), and it allows you to temporarily change a project's version string,
-to support "daily builds" or "snapshot" releases. It is run automatically by
-the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands
-in order to update the project's metadata, but you can also specify it
-explicitly in order to temporarily change the project's version string while
-executing other commands.  (It also generates the ``.egg-info/SOURCES.txt``
-manifest file, which is used when you are building source distributions.)
-
-In addition to writing the core egg metadata defined by ``setuptools`` and
-required by ``pkg_resources``, this command can be extended to write other
-metadata files as well, by defining entry points in the ``egg_info.writers``
-group. See the section on `Adding new EGG-INFO Files`_ below for more details.
-Note that using additional metadata writers may require you to include a
-``setup_requires`` argument to ``setup()`` in order to ensure that the desired
-writers are available on ``sys.path``.
-
-
-Release Tagging Options
------------------------
-
-The following options can be used to modify the project's version string for
-all remaining commands on the setup command line. The options are processed
-in the order shown, so if you use more than one, the requested tags will be
-added in the following order:
-
-``--tag-build=NAME, -b NAME``
- Append NAME to the project's version string. Due to the way setuptools
- processes "pre-release" version suffixes beginning with the letters "a"
- through "e" (like "alpha", "beta", and "candidate"), you will usually want
- to use a tag like ".build" or ".dev", as this will cause the version number
- to be considered *lower* than the project's default version. (If you
- want to make the version number *higher* than the default version, you can
- always leave off --tag-build and then use one or both of the following
- options.)
-
- If you have a default build tag set in your ``setup.cfg``, you can suppress
- it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
- to the ``egg_info`` command.
-
-``--tag-svn-revision, -r``
-    If the current directory is a Subversion checkout (i.e. has a ``.svn``
-    subdirectory), this appends a string of the form "-rNNNN" to the project's
- version string, where NNNN is the revision number of the most recent
- modification to the current directory, as obtained from the ``svn info``
- command.
-
- If the current directory is not a Subversion checkout, the command will
- look for a ``PKG-INFO`` file instead, and try to find the revision number
- from that, by looking for a "-rNNNN" string at the end of the version
- number. (This is so that building a package from a source distribution of
- a Subversion snapshot will produce a binary with the correct version
- number.)
-
- If there is no ``PKG-INFO`` file, or the version number contained therein
- does not end with ``-r`` and a number, then ``-r0`` is used.
-
-``--no-svn-revision, -R``
- Don't include the Subversion revision in the version number. This option
- is included so you can override a default setting put in ``setup.cfg``.
-
-``--tag-date, -d``
- Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
- project's version number.
-
-``--no-date, -D``
- Don't include a date stamp in the version number. This option is included
- so you can override a default setting in ``setup.cfg``.
-
-
-(Note: Because these options modify the version number used for source and
-binary distributions of your project, you should first make sure that you know
-how the resulting version numbers will be interpreted by automated tools
-like EasyInstall. See the section above on `Specifying Your Project's
-Version`_ for an explanation of pre- and post-release tags, as well as tips on
-how to choose and verify a versioning scheme for your project.)
-
-For advanced uses, there is one other option that can be set, to change the
-location of the project's ``.egg-info`` directory. Commands that need to find
-the project's source directory or metadata should get it from this setting:
-
-
-Other ``egg_info`` Options
---------------------------
-
-``--egg-base=SOURCEDIR, -e SOURCEDIR``
- Specify the directory that should contain the .egg-info directory. This
- should normally be the root of your project's source tree (which is not
- necessarily the same as your project directory; some projects use a ``src``
- or ``lib`` subdirectory as the source root). You should not normally need
- to specify this directory, as it is normally determined from the
- ``package_dir`` argument to the ``setup()`` function, if any. If there is
- no ``package_dir`` set, this option defaults to the current directory.
-
-
-``egg_info`` Examples
----------------------
-
-Creating a dated "nightly build" snapshot egg::
-
- python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
-
-Creating and uploading a release with no version tags, even if some default
-tags are specified in ``setup.cfg``::
-
- python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-(Notice that ``egg_info`` must always appear on the command line *before* any
-commands that you want the version changes to apply to.)
-
-
-.. _install command:
-
-``install`` - Run ``easy_install`` or old-style installation
-============================================================
-
-The setuptools ``install`` command is basically a shortcut to run the
-``easy_install`` command on the current project. However, for convenience
-in creating "system packages" of setuptools-based projects, you can also
-use this option:
-
-``--single-version-externally-managed``
- This boolean option tells the ``install`` command to perform an "old style"
- installation, with the addition of an ``.egg-info`` directory so that the
- installed project will still have its metadata available and operate
- normally. If you use this option, you *must* also specify the ``--root``
- or ``--record`` options (or both), because otherwise you will have no way
- to identify and remove the installed files.
-
-This option is automatically in effect when ``install`` is invoked by another
-distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm``
-will create system packages of eggs. It is also automatically in effect if
-you specify the ``--root`` option.
-
-
-``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages``
-==============================================================================
-
-Setuptools runs this command as part of ``install`` operations that use the
-``--single-version-externally-managed`` options. You should not invoke it
-directly; it is documented here for completeness and so that distutils
-extensions such as system package builders can make use of it. This command
-has only one option:
-
-``--install-dir=DIR, -d DIR``
- The parent directory where the ``.egg-info`` directory will be placed.
- Defaults to the same as the ``--install-dir`` option specified for the
- ``install_lib`` command, which is usually the system ``site-packages``
- directory.
-
-This command assumes that the ``egg_info`` command has been given valid options
-via the command line or ``setup.cfg``, as it will invoke the ``egg_info``
-command and use its options to locate the project's source ``.egg-info``
-directory.
-
-
-.. _rotate:
-
-``rotate`` - Delete outdated distribution files
-===============================================
-
-As you develop new versions of your project, your distribution (``dist``)
-directory will gradually fill up with older source and/or binary distribution
-files. The ``rotate`` command lets you automatically clean these up, keeping
-only the N most-recently modified files matching a given pattern.
-
-``--match=PATTERNLIST, -m PATTERNLIST``
- Comma-separated list of glob patterns to match. This option is *required*.
- The project name and ``-*`` is prepended to the supplied patterns, in order
- to match only distributions belonging to the current project (in case you
- have a shared distribution directory for multiple projects). Typically,
- you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
- the specified type. Note that each supplied pattern is treated as a
- distinct group of files for purposes of selecting files to delete.
-
-``--keep=COUNT, -k COUNT``
- Number of matching distributions to keep. For each group of files
- identified by a pattern specified with the ``--match`` option, delete all
- but the COUNT most-recently-modified files in that group. This option is
- *required*.
-
-``--dist-dir=DIR, -d DIR``
- Directory where the distributions are. This defaults to the value of the
- ``bdist`` command's ``--dist-dir`` option, which will usually be the
- project's ``dist`` subdirectory.
-
-**Example 1**: Delete all .tar.gz files from the distribution directory, except
-for the 3 most recently modified ones::
-
- setup.py rotate --match=.tar.gz --keep=3
-
-**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
-directory, except the most recently modified one for each Python version::
-
- setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
-
-
-.. _saveopts:
-
-``saveopts`` - Save used options to a configuration file
-========================================================
-
-Finding and editing ``distutils`` configuration files can be a pain, especially
-since you also have to translate the configuration options from command-line
-form to the proper configuration file format. You can avoid these hassles by
-using the ``saveopts`` command. Just add it to the command line to save the
-options you used. For example, this command builds the project using
-the ``mingw32`` C compiler, then saves the --compiler setting as the default
-for future builds (even those run implicitly by the ``install`` command)::
-
- setup.py build --compiler=mingw32 saveopts
-
-The ``saveopts`` command saves all options for every command specified on the
-command line to the project's local ``setup.cfg`` file, unless you use one of
-the `configuration file options`_ to change where the options are saved. For
-example, this command does the same as above, but saves the compiler setting
-to the site-wide (global) distutils configuration::
-
- setup.py build --compiler=mingw32 saveopts -g
-
-Note that it doesn't matter where you place the ``saveopts`` command on the
-command line; it will still save all the options specified for all commands.
-For example, this is another valid way to spell the last example::
-
- setup.py saveopts -g build --compiler=mingw32
-
-Note, however, that all of the commands specified are always run, regardless of
-where ``saveopts`` is placed on the command line.
-
-
-Configuration File Options
---------------------------
-
-Normally, settings such as options and aliases are saved to the project's
-local ``setup.cfg`` file. But you can override this and save them to the
-global or per-user configuration files, or to a manually-specified filename.
-
-``--global-config, -g``
- Save settings to the global ``distutils.cfg`` file inside the ``distutils``
- package directory. You must have write access to that directory to use
- this option. You also can't combine this option with ``-u`` or ``-f``.
-
-``--user-config, -u``
- Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
- ``$HOME/pydistutils.cfg`` (Windows) file. You can't combine this option
- with ``-g`` or ``-f``.
-
-``--filename=FILENAME, -f FILENAME``
- Save settings to the specified configuration file to use. You can't
- combine this option with ``-g`` or ``-u``. Note that if you specify a
- non-standard filename, the ``distutils`` and ``setuptools`` will not
- use the file's contents. This option is mainly included for use in
- testing.
-
-These options are used by other ``setuptools`` commands that modify
-configuration files, such as the `alias`_ and `setopt`_ commands.
-
-
-.. _setopt:
-
-``setopt`` - Set a distutils or setuptools option in a config file
-==================================================================
-
-This command is mainly for use by scripts, but it can also be used as a quick
-and dirty way to change a distutils configuration option without having to
-remember what file the options are in and then open an editor.
-
-**Example 1**. Set the default C compiler to ``mingw32`` (using long option
-names)::
-
- setup.py setopt --command=build --option=compiler --set-value=mingw32
-
-**Example 2**. Remove any setting for the distutils default package
-installation directory (short option names)::
-
- setup.py setopt -c install -o install_lib -r
-
-
-Options for the ``setopt`` command:
-
-``--command=COMMAND, -c COMMAND``
- Command to set the option for. This option is required.
-
-``--option=OPTION, -o OPTION``
- The name of the option to set. This option is required.
-
-``--set-value=VALUE, -s VALUE``
- The value to set the option to. Not needed if ``-r`` or ``--remove`` is
- set.
-
-``--remove, -r``
- Remove (unset) the option, instead of setting it.
-
-In addition to the above options, you may use any of the `configuration file
-options`_ (listed under the `saveopts`_ command, above) to determine which
-distutils configuration file the option will be added to (or removed from).
-
-
-.. _test:
-
-``test`` - Build package and run a unittest suite
-=================================================
-
-When doing test-driven development, or running automated builds that need
-testing before they are deployed for downloading or use, it's often useful
-to be able to run a project's unit tests without actually deploying the project
-anywhere, even using the ``develop`` command. The ``test`` command runs a
-project's unit tests without actually deploying it, by temporarily putting the
-project's source on ``sys.path``, after first running ``build_ext -i`` and
-``egg_info`` to ensure that any C extensions and project metadata are
-up-to-date.
-
-To use this command, your project's tests must be wrapped in a ``unittest``
-test suite by either a function, a ``TestCase`` class or method, or a module
-or package containing ``TestCase`` classes. If the named suite is a module,
-and the module has an ``additional_tests()`` function, it is called and the
-result (which must be a ``unittest.TestSuite``) is added to the tests to be
-run. If the named suite is a package, any submodules and subpackages are
-recursively added to the overall test suite. (Note: if your project specifies
-a ``test_loader``, the rules for processing the chosen ``test_suite`` may
-differ; see the `test_loader`_ documentation for more details.)
-
-Note that many test systems including ``doctest`` support wrapping their
-non-``unittest`` tests in ``TestSuite`` objects. So, if you are using a test
-package that does not support this, we suggest you encourage its developers to
-implement test suite support, as this is a convenient and standard way to
-aggregate a collection of tests to be run under a common test harness.
-
-By default, tests will be run in the "verbose" mode of the ``unittest``
-package's text test runner, but you can get the "quiet" mode (just dots) if
-you supply the ``-q`` or ``--quiet`` option, either as a global option to
-the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
-command itself (e.g. ``setup.py test -q``). There is one other option
-available:
-
-``--test-suite=NAME, -s NAME``
- Specify the test suite (or module, class, or method) to be run
- (e.g. ``some_module.test_suite``). The default for this option can be
- set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
-
- setup(
- # ...
- test_suite = "my_package.tests.test_all"
- )
-
- If you did not set a ``test_suite`` in your ``setup()`` call, and do not
- provide a ``--test-suite`` option, an error will occur.
-
-
-.. _upload:
-
-``upload`` - Upload source and/or egg distributions to PyPI
-===========================================================
-
-The ``upload`` command is implemented and `documented
-<https://docs.python.org/3.1/distutils/uploading.html>`_
-in distutils.
-
-Setuptools augments the ``upload`` command with support
-for `keyring <https://pypi.python.org/pypi/keyring>`_,
-allowing the password to be stored in a secure
-location and not in plaintext in the .pypirc file. To use
-keyring, first install keyring and set the password for
-the relevant repository, e.g.::
-
- python -m keyring set <repository> <username>
- Password for '<username>' in '<repository>': ********
-
-Then, in .pypirc, set the repository configuration as normal,
-but omit the password. Thereafter, uploads will use the
-password from the keyring.
-
-New in 20.1: Added keyring support.
-
-.. _upload_docs:
-
-``upload_docs`` - Upload package documentation to PyPI
-======================================================
-
-PyPI now supports uploading project documentation to the dedicated URL
-https://pythonhosted.org/<project>/.
-
-The ``upload_docs`` command will create the necessary zip file out of a
-documentation directory and will post to the repository.
-
-Note that to upload the documentation of a project, the corresponding version
-must already be registered with PyPI, using the distutils ``register``
-command -- just like the ``upload`` command.
-
-Assuming there is an ``Example`` project with documentation in the
-subdirectory ``docs``, e.g.::
-
- Example/
- |-- example.py
- |-- setup.cfg
- |-- setup.py
- |-- docs
- | |-- build
- | | `-- html
- | | | |-- index.html
- | | | `-- tips_tricks.html
- | |-- conf.py
- | |-- index.txt
- | `-- tips_tricks.txt
-
-You can simply pass the documentation directory path to the ``upload_docs``
-command::
-
- python setup.py upload_docs --upload-dir=docs/build/html
-
-If no ``--upload-dir`` is given, ``upload_docs`` will attempt to run the
-``build_sphinx`` command to generate uploadable documentation.
-For the command to become available, `Sphinx <http://sphinx.pocoo.org/>`_
-must be installed in the same environment as distribute.
-
-As with other ``setuptools``-based commands, you can define useful
-defaults in the ``setup.cfg`` of your Python project, e.g.:
-
-.. code-block:: ini
-
- [upload_docs]
- upload-dir = docs/build/html
-
-The ``upload_docs`` command has the following options:
-
-``--upload-dir``
- The directory to be uploaded to the repository.
-
-``--show-response``
- Display the full response text from server; this is useful for debugging
- PyPI problems.
-
-``--repository=URL, -r URL``
- The URL of the repository to upload to. Defaults to
- https://pypi.python.org/pypi (i.e., the main PyPI installation).
-
-
---------------------------------
-Extending and Reusing Setuptools
---------------------------------
-
-Creating ``distutils`` Extensions
-=================================
-
-It can be hard to add new commands or setup arguments to the distutils. But
-the ``setuptools`` package makes it a bit easier, by allowing you to distribute
-a distutils extension as a separate project, and then have projects that need
-the extension just refer to it in their ``setup_requires`` argument.
-
-With ``setuptools``, your distutils extension projects can hook in new
-commands and ``setup()`` arguments just by defining "entry points". These
-are mappings from command or argument names to a specification of where to
-import a handler from. (See the section on `Dynamic Discovery of Services and
-Plugins`_ above for some more background on entry points.)
-
-
-Adding Commands
----------------
-
-You can add new ``setup`` commands by defining entry points in the
-``distutils.commands`` group. For example, if you wanted to add a ``foo``
-command, you might add something like this to your distutils extension
-project's setup script::
-
- setup(
- # ...
- entry_points = {
- "distutils.commands": [
- "foo = mypackage.some_module:foo",
- ],
- },
- )
-
-(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
-a ``setuptools.Command`` subclass.)
-
-Once a project containing such entry points has been activated on ``sys.path``,
-(e.g. by running "install" or "develop" with a site-packages installation
-directory) the command(s) will be available to any ``setuptools``-based setup
-scripts. It is not necessary to use the ``--command-packages`` option or
-to monkeypatch the ``distutils.command`` package to install your commands;
-``setuptools`` automatically adds a wrapper to the distutils to search for
-entry points in the active distributions on ``sys.path``. In fact, this is
-how setuptools' own commands are installed: the setuptools project's setup
-script defines entry points for them!
-
-
-Adding ``setup()`` Arguments
-----------------------------
-
-Sometimes, your commands may need additional arguments to the ``setup()``
-call. You can enable this by defining entry points in the
-``distutils.setup_keywords`` group. For example, if you wanted a ``setup()``
-argument called ``bar_baz``, you might add something like this to your
-distutils extension project's setup script::
-
- setup(
- # ...
- entry_points = {
- "distutils.commands": [
- "foo = mypackage.some_module:foo",
- ],
- "distutils.setup_keywords": [
- "bar_baz = mypackage.some_module:validate_bar_baz",
- ],
- },
- )
-
-The idea here is that the entry point defines a function that will be called
-to validate the ``setup()`` argument, if it's supplied. The ``Distribution``
-object will have the initial value of the attribute set to ``None``, and the
-validation function will only be called if the ``setup()`` call sets it to
-a non-None value. Here's an example validation function::
-
- def assert_bool(dist, attr, value):
- """Verify that value is True, False, 0, or 1"""
- if bool(value) != value:
- raise DistutilsSetupError(
- "%r must be a boolean value (got %r)" % (attr,value)
- )
-
-Your function should accept three arguments: the ``Distribution`` object,
-the attribute name, and the attribute value. It should raise a
-``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
-is invalid. Remember, your function will only be called with non-None values,
-and the default value of arguments defined this way is always None. So, your
-commands should always be prepared for the possibility that the attribute will
-be ``None`` when they access it later.
-
-If more than one active distribution defines an entry point for the same
-``setup()`` argument, *all* of them will be called. This allows multiple
-distutils extensions to define a common argument, as long as they agree on
-what values of that argument are valid.
-
-Also note that as with commands, it is not necessary to subclass or monkeypatch
-the distutils ``Distribution`` class in order to add your arguments; it is
-sufficient to define the entry points in your extension, as long as any setup
-script using your extension lists your project in its ``setup_requires``
-argument.
-
-
-Adding new EGG-INFO Files
--------------------------
-
-Some extensible applications or frameworks may want to allow third parties to
-develop plugins with application or framework-specific metadata included in
-the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
-metadata API. The easiest way to allow this is to create a distutils extension
-to be used from the plugin projects' setup scripts (via ``setup_requires``)
-that defines a new setup keyword, and then uses that data to write an EGG-INFO
-file when the ``egg_info`` command is run.
-
-The ``egg_info`` command looks for extension points in an ``egg_info.writers``
-group, and calls them to write the files. Here's a simple example of a
-distutils extension defining a setup argument ``foo_bar``, which is a list of
-lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
-project that uses the argument::
-
- setup(
- # ...
- entry_points = {
- "distutils.setup_keywords": [
- "foo_bar = setuptools.dist:assert_string_list",
- ],
- "egg_info.writers": [
- "foo_bar.txt = setuptools.command.egg_info:write_arg",
- ],
- },
- )
-
-This simple example makes use of two utility functions defined by setuptools
-for its own use: a routine to validate that a setup keyword is a sequence of
-strings, and another one that looks up a setup argument and writes it to
-a file. Here's what the writer utility looks like::
-
- def write_arg(cmd, basename, filename):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value)+'\n'
- cmd.write_or_delete_file(argname, filename, value)
-
-As you can see, ``egg_info.writers`` entry points must be a function taking
-three arguments: a ``egg_info`` command instance, the basename of the file to
-write (e.g. ``foo_bar.txt``), and the actual full filename that should be
-written to.
-
-In general, writer functions should honor the command object's ``dry_run``
-setting when writing files, and use the ``distutils.log`` object to do any
-console output. The easiest way to conform to this requirement is to use
-the ``cmd`` object's ``write_file()``, ``delete_file()``, and
-``write_or_delete_file()`` methods exclusively for your file operations. See
-those methods' docstrings for more details.
-
-
-Adding Support for Revision Control Systems
--------------------------------------------------
-
-If the files you want to include in the source distribution are tracked using
-Git, Mercurial or SVN, you can use the following packages to achieve that:
-
-- Git and Mercurial: `setuptools_scm <https://pypi.python.org/pypi/setuptools_scm>`_
-- SVN: `setuptools_svn <https://pypi.python.org/pypi/setuptools_svn>`_
-
-If you would like to create a plugin for ``setuptools`` to find files tracked
-by another revision control system, you can do so by adding an entry point to
-the ``setuptools.file_finders`` group. The entry point should be a function
-accepting a single directory name, and should yield all the filenames within
-that directory (and any subdirectories thereof) that are under revision
-control.
-
-For example, if you were going to create a plugin for a revision control system
-called "foobar", you would write a function something like this:
-
-.. code-block:: python
-
- def find_files_for_foobar(dirname):
- # loop to yield paths that start with `dirname`
-
-And you would register it in a setup script using something like this::
-
- entry_points = {
- "setuptools.file_finders": [
- "foobar = my_foobar_module:find_files_for_foobar"
- ]
- }
-
-Then, anyone who wants to use your plugin can simply install it, and their
-local setuptools installation will be able to find the necessary files.
-
-It is not necessary to distribute source control plugins with projects that
-simply use the other source control system, or to specify the plugins in
-``setup_requires``. When you create a source distribution with the ``sdist``
-command, setuptools automatically records what files were found in the
-``SOURCES.txt`` file. That way, recipients of source distributions don't need
-to have revision control at all. However, if someone is working on a package
-by checking out with that system, they will need the same plugin(s) that the
-original author is using.
-
-A few important points for writing revision control file finders:
-
-* Your finder function MUST return relative paths, created by appending to the
- passed-in directory name. Absolute paths are NOT allowed, nor are relative
- paths that reference a parent directory of the passed-in directory.
-
-* Your finder function MUST accept an empty string as the directory name,
- meaning the current directory. You MUST NOT convert this to a dot; just
- yield relative paths. So, yielding a subdirectory named ``some/dir`` under
- the current directory should NOT be rendered as ``./some/dir`` or
- ``/somewhere/some/dir``, but *always* as simply ``some/dir``
-
-* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
- with the absence of needed programs (i.e., ones belonging to the revision
- control system itself. It *may*, however, use ``distutils.log.warn()`` to
- inform the user of the missing program(s).
-
-
-Subclassing ``Command``
------------------------
-
-Sorry, this section isn't written yet, and neither is a lot of what's below
-this point.
-
-XXX
-
-
-Reusing ``setuptools`` Code
-===========================
-
-``ez_setup``
-------------
-
-XXX
-
-
-``setuptools.archive_util``
----------------------------
-
-XXX
-
-
-``setuptools.sandbox``
-----------------------
-
-XXX
-
-
-``setuptools.package_index``
-----------------------------
-
-XXX
-
-
-Mailing List and Bug Tracker
-============================
-
-Please use the `distutils-sig mailing list`_ for questions and discussion about
-setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
-confirmed via the list are actual bugs, and which you have reduced to a minimal
-set of steps to reproduce.
-
-.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
-.. _setuptools bug tracker: https://github.com/pypa/setuptools/
-
diff --git a/easy_install.py b/easy_install.py
deleted file mode 100755
index d87e9840..00000000
--- a/easy_install.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Run the EasyInstall command"""
-
-if __name__ == '__main__':
- from setuptools.command.easy_install import main
- main()
diff --git a/launcher.c b/launcher.c
deleted file mode 100755
index be69f0c6..00000000
--- a/launcher.c
+++ /dev/null
@@ -1,335 +0,0 @@
-/* Setuptools Script Launcher for Windows
-
- This is a stub executable for Windows that functions somewhat like
- Effbot's "exemaker", in that it runs a script with the same name but
- a .py extension, using information from a #! line. It differs in that
- it spawns the actual Python executable, rather than attempting to
- hook into the Python DLL. This means that the script will run with
- sys.executable set to the Python executable, where exemaker ends up with
- sys.executable pointing to itself. (Which means it won't work if you try
- to run another Python process using sys.executable.)
-
- To build/rebuild with mingw32, do this in the setuptools project directory:
-
- gcc -DGUI=0 -mno-cygwin -O -s -o setuptools/cli.exe launcher.c
- gcc -DGUI=1 -mwindows -mno-cygwin -O -s -o setuptools/gui.exe launcher.c
-
- To build for Windows RT, install both Visual Studio Express for Windows 8
- and for Windows Desktop (both freeware), create "win32" application using
- "Windows Desktop" version, create new "ARM" target via
- "Configuration Manager" menu and modify ".vcxproj" file by adding
- "<WindowsSDKDesktopARMSupport>true</WindowsSDKDesktopARMSupport>" tag
- as child of "PropertyGroup" tags that has "Debug|ARM" and "Release|ARM"
- properties.
-
- It links to msvcrt.dll, but this shouldn't be a problem since it doesn't
- actually run Python in the same process. Note that using 'exec' instead
- of 'spawn' doesn't work, because on Windows this leads to the Python
- executable running in the *background*, attached to the same console
- window, meaning you get a command prompt back *before* Python even finishes
- starting. So, we have to use spawnv() and wait for Python to exit before
- continuing. :(
-*/
-
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <windows.h>
-#include <tchar.h>
-#include <fcntl.h>
-
-int child_pid=0;
-
-int fail(char *format, char *data) {
- /* Print error message to stderr and return 2 */
- fprintf(stderr, format, data);
- return 2;
-}
-
-char *quoted(char *data) {
- int i, ln = strlen(data), nb;
-
- /* We allocate twice as much space as needed to deal with worse-case
- of having to escape everything. */
- char *result = calloc(ln*2+3, sizeof(char));
- char *presult = result;
-
- *presult++ = '"';
- for (nb=0, i=0; i < ln; i++)
- {
- if (data[i] == '\\')
- nb += 1;
- else if (data[i] == '"')
- {
- for (; nb > 0; nb--)
- *presult++ = '\\';
- *presult++ = '\\';
- }
- else
- nb = 0;
- *presult++ = data[i];
- }
-
- for (; nb > 0; nb--) /* Deal w trailing slashes */
- *presult++ = '\\';
-
- *presult++ = '"';
- *presult++ = 0;
- return result;
-}
-
-
-
-
-
-
-
-
-
-
-char *loadable_exe(char *exename) {
- /* HINSTANCE hPython; DLL handle for python executable */
- char *result;
-
- /* hPython = LoadLibraryEx(exename, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
- if (!hPython) return NULL; */
-
- /* Return the absolute filename for spawnv */
- result = calloc(MAX_PATH, sizeof(char));
- strncpy(result, exename, MAX_PATH);
- /*if (result) GetModuleFileNameA(hPython, result, MAX_PATH);
-
- FreeLibrary(hPython); */
- return result;
-}
-
-
-char *find_exe(char *exename, char *script) {
- char drive[_MAX_DRIVE], dir[_MAX_DIR], fname[_MAX_FNAME], ext[_MAX_EXT];
- char path[_MAX_PATH], c, *result;
-
- /* convert slashes to backslashes for uniform search below */
- result = exename;
- while (c = *result++) if (c=='/') result[-1] = '\\';
-
- _splitpath(exename, drive, dir, fname, ext);
- if (drive[0] || dir[0]=='\\') {
- return loadable_exe(exename); /* absolute path, use directly */
- }
- /* Use the script's parent directory, which should be the Python home
- (This should only be used for bdist_wininst-installed scripts, because
- easy_install-ed scripts use the absolute path to python[w].exe
- */
- _splitpath(script, drive, dir, fname, ext);
- result = dir + strlen(dir) -1;
- if (*result == '\\') result--;
- while (*result != '\\' && result>=dir) *result-- = 0;
- _makepath(path, drive, dir, exename, NULL);
- return loadable_exe(path);
-}
-
-
-char **parse_argv(char *cmdline, int *argc)
-{
- /* Parse a command line in-place using MS C rules */
-
- char **result = calloc(strlen(cmdline), sizeof(char *));
- char *output = cmdline;
- char c;
- int nb = 0;
- int iq = 0;
- *argc = 0;
-
- result[0] = output;
- while (isspace(*cmdline)) cmdline++; /* skip leading spaces */
-
- do {
- c = *cmdline++;
- if (!c || (isspace(c) && !iq)) {
- while (nb) {*output++ = '\\'; nb--; }
- *output++ = 0;
- result[++*argc] = output;
- if (!c) return result;
- while (isspace(*cmdline)) cmdline++; /* skip leading spaces */
- if (!*cmdline) return result; /* avoid empty arg if trailing ws */
- continue;
- }
- if (c == '\\')
- ++nb; /* count \'s */
- else {
- if (c == '"') {
- if (!(nb & 1)) { iq = !iq; c = 0; } /* skip " unless odd # of \ */
- nb = nb >> 1; /* cut \'s in half */
- }
- while (nb) {*output++ = '\\'; nb--; }
- if (c) *output++ = c;
- }
- } while (1);
-}
-
-void pass_control_to_child(DWORD control_type) {
- /*
- * distribute-issue207
- * passes the control event to child process (Python)
- */
- if (!child_pid) {
- return;
- }
- GenerateConsoleCtrlEvent(child_pid,0);
-}
-
-BOOL control_handler(DWORD control_type) {
- /*
- * distribute-issue207
- * control event handler callback function
- */
- switch (control_type) {
- case CTRL_C_EVENT:
- pass_control_to_child(0);
- break;
- }
- return TRUE;
-}
-
-int create_and_wait_for_subprocess(char* command) {
- /*
- * distribute-issue207
- * launches child process (Python)
- */
- DWORD return_value = 0;
- LPSTR commandline = command;
- STARTUPINFOA s_info;
- PROCESS_INFORMATION p_info;
- ZeroMemory(&p_info, sizeof(p_info));
- ZeroMemory(&s_info, sizeof(s_info));
- s_info.cb = sizeof(STARTUPINFO);
- // set-up control handler callback funciotn
- SetConsoleCtrlHandler((PHANDLER_ROUTINE) control_handler, TRUE);
- if (!CreateProcessA(NULL, commandline, NULL, NULL, TRUE, 0, NULL, NULL, &s_info, &p_info)) {
- fprintf(stderr, "failed to create process.\n");
- return 0;
- }
- child_pid = p_info.dwProcessId;
- // wait for Python to exit
- WaitForSingleObject(p_info.hProcess, INFINITE);
- if (!GetExitCodeProcess(p_info.hProcess, &return_value)) {
- fprintf(stderr, "failed to get exit code from process.\n");
- return 0;
- }
- return return_value;
-}
-
-char* join_executable_and_args(char *executable, char **args, int argc)
-{
- /*
- * distribute-issue207
- * CreateProcess needs a long string of the executable and command-line arguments,
- * so we need to convert it from the args that was built
- */
- int len,counter;
- char* cmdline;
-
- len=strlen(executable)+2;
- for (counter=1; counter<argc; counter++) {
- len+=strlen(args[counter])+1;
- }
-
- cmdline = (char*)calloc(len, sizeof(char));
- sprintf(cmdline, "%s", executable);
- len=strlen(executable);
- for (counter=1; counter<argc; counter++) {
- sprintf(cmdline+len, " %s", args[counter]);
- len+=strlen(args[counter])+1;
- }
- return cmdline;
-}
-
-int run(int argc, char **argv, int is_gui) {
-
- char python[256]; /* python executable's filename*/
- char *pyopt; /* Python option */
- char script[256]; /* the script's filename */
-
- int scriptf; /* file descriptor for script file */
-
- char **newargs, **newargsp, **parsedargs; /* argument array for exec */
- char *ptr, *end; /* working pointers for string manipulation */
- char *cmdline;
- int i, parsedargc; /* loop counter */
-
- /* compute script name from our .exe name*/
- GetModuleFileNameA(NULL, script, sizeof(script));
- end = script + strlen(script);
- while( end>script && *end != '.')
- *end-- = '\0';
- *end-- = '\0';
- strcat(script, (GUI ? "-script.pyw" : "-script.py"));
-
- /* figure out the target python executable */
-
- scriptf = open(script, O_RDONLY);
- if (scriptf == -1) {
- return fail("Cannot open %s\n", script);
- }
- end = python + read(scriptf, python, sizeof(python));
- close(scriptf);
-
- ptr = python-1;
- while(++ptr < end && *ptr && *ptr!='\n' && *ptr!='\r') {;}
-
- *ptr-- = '\0';
-
- if (strncmp(python, "#!", 2)) {
- /* default to python.exe if no #! header */
- strcpy(python, "#!python.exe");
- }
-
- parsedargs = parse_argv(python+2, &parsedargc);
-
- /* Using spawnv() can fail strangely if you e.g. find the Cygwin
- Python, so we'll make sure Windows can find and load it */
-
- ptr = find_exe(parsedargs[0], script);
- if (!ptr) {
- return fail("Cannot find Python executable %s\n", parsedargs[0]);
- }
-
- /* printf("Python executable: %s\n", ptr); */
-
- /* Argument array needs to be
- parsedargc + argc, plus 1 for null sentinel */
-
- newargs = (char **)calloc(parsedargc + argc + 1, sizeof(char *));
- newargsp = newargs;
-
- *newargsp++ = quoted(ptr);
- for (i = 1; i<parsedargc; i++) *newargsp++ = quoted(parsedargs[i]);
-
- *newargsp++ = quoted(script);
- for (i = 1; i < argc; i++) *newargsp++ = quoted(argv[i]);
-
- *newargsp++ = NULL;
-
- /* printf("args 0: %s\nargs 1: %s\n", newargs[0], newargs[1]); */
-
- if (is_gui) {
- /* Use exec, we don't need to wait for the GUI to finish */
- execv(ptr, (const char * const *)(newargs));
- return fail("Could not exec %s", ptr); /* shouldn't get here! */
- }
-
- /*
- * distribute-issue207: using CreateProcessA instead of spawnv
- */
- cmdline = join_executable_and_args(ptr, newargs, parsedargc + argc);
- return create_and_wait_for_subprocess(cmdline);
-}
-
-int WINAPI WinMain(HINSTANCE hI, HINSTANCE hP, LPSTR lpCmd, int nShow) {
- return run(__argc, __argv, GUI);
-}
-
-int main(int argc, char** argv) {
- return run(argc, argv, GUI);
-}
-
diff --git a/msvc-build-launcher.cmd b/msvc-build-launcher.cmd
deleted file mode 100644
index e54c4f6c..00000000
--- a/msvc-build-launcher.cmd
+++ /dev/null
@@ -1,55 +0,0 @@
-@echo off
-
-REM Use old Windows SDK 6.1 so created .exe will be compatible with
-REM old Windows versions.
-REM Windows SDK 6.1 may be downloaded at:
-REM http://www.microsoft.com/en-us/download/details.aspx?id=11310
-set PATH_OLD=%PATH%
-
-REM The SDK creates a false install of Visual Studio at one of these locations
-set PATH=C:\Program Files\Microsoft Visual Studio 9.0\VC\bin;%PATH%
-set PATH=C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin;%PATH%
-
-REM set up the environment to compile to x86
-call VCVARS32
-if "%ERRORLEVEL%"=="0" (
- cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:CONSOLE /out:setuptools/cli-32.exe
- cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x86 /SUBSYSTEM:WINDOWS /out:setuptools/gui-32.exe
-) else (
- echo Windows SDK 6.1 not found to build Windows 32-bit version
-)
-
-REM buildout (and possibly other implementations) currently depend on
-REM the 32-bit launcher scripts without the -32 in the filename, so copy them
-REM there for now.
-copy setuptools/cli-32.exe setuptools/cli.exe
-copy setuptools/gui-32.exe setuptools/gui.exe
-
-REM now for 64-bit
-REM Use the x86_amd64 profile, which is the 32-bit cross compiler for amd64
-call VCVARSx86_amd64
-if "%ERRORLEVEL%"=="0" (
- cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:CONSOLE /out:setuptools/cli-64.exe
- cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:x64 /SUBSYSTEM:WINDOWS /out:setuptools/gui-64.exe
-) else (
- echo Windows SDK 6.1 not found to build Windows 64-bit version
-)
-
-REM Windows RT ARM build requires both freeware
-REM "Visual Studio Express 2012 for Windows 8" and
-REM "Visual Studio Express 2012 for Windows Desktop" to be installed from
-REM http://www.microsoft.com/visualstudio/eng/products/visual-studio-express-products
-set PATH=%PATH_OLD%
-set PATH=C:\Program Files\Microsoft Visual Studio 11.0\VC;%PATH%
-set PATH=C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC;%PATH%
-call VCVARSALL x86_arm >nul 2>&1
-if "%ERRORLEVEL%"=="0" (
- echo Building Windows RT Version ...
- cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" /D _ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE launcher.c /O2 /link /MACHINE:ARM /SUBSYSTEM:CONSOLE /out:setuptools/cli-arm-32.exe
- cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" /D _ARM_WINAPI_PARTITION_DESKTOP_SDK_AVAILABLE launcher.c /O2 /link /MACHINE:ARM /SUBSYSTEM:WINDOWS /out:setuptools/gui-arm-32.exe
-) else (
- echo Visual Studio ^(Express^) 2012 not found to build Windows RT Version
-)
-
-set PATH=%PATH_OLD%
-
diff --git a/pavement.py b/pavement.py
deleted file mode 100644
index 8d7574e2..00000000
--- a/pavement.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import re
-
-from paver.easy import task, path as Path
-import pip
-
-def remove_all(paths):
- for path in paths:
- path.rmtree() if path.isdir() else path.remove()
-
-@task
-def update_vendored():
- vendor = Path('pkg_resources/_vendor')
- remove_all(vendor.glob('packaging*'))
- remove_all(vendor.glob('six*'))
- remove_all(vendor.glob('pyparsing*'))
- install_args = [
- 'install',
- '-r', str(vendor/'vendored.txt'),
- '-t', str(vendor),
- ]
- pip.main(install_args)
- packaging = vendor / 'packaging'
- for file in packaging.glob('*.py'):
- text = file.text()
- text = re.sub(r' (pyparsing|six)', r' pkg_resources.extern.\1', text)
- file.write_text(text)
- remove_all(vendor.glob('*.dist-info'))
- remove_all(vendor.glob('*.egg-info'))
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
deleted file mode 100644
index 2eab8230..00000000
--- a/pkg_resources/__init__.py
+++ /dev/null
@@ -1,2956 +0,0 @@
-"""
-Package resource API
---------------------
-
-A resource is a logical file contained within a package, or a logical
-subdirectory thereof. The package resource API expects resource names
-to have their path parts separated with ``/``, *not* whatever the local
-path separator is. Do not use os.path operations to manipulate resource
-names being passed into the API.
-
-The package resource API is designed to work with normal filesystem packages,
-.egg files, and unpacked .egg files. It can also work in a limited way with
-.zip files and with custom PEP 302 loaders that support the ``get_data()``
-method.
-"""
-
-from __future__ import absolute_import
-
-import sys
-import os
-import io
-import time
-import re
-import types
-import zipfile
-import zipimport
-import warnings
-import stat
-import functools
-import pkgutil
-import operator
-import platform
-import collections
-import plistlib
-import email.parser
-import tempfile
-import textwrap
-from pkgutil import get_importer
-
-try:
- import _imp
-except ImportError:
- # Python 3.2 compatibility
- import imp as _imp
-
-from pkg_resources.extern import six
-from pkg_resources.extern.six.moves import urllib, map, filter
-
-# capture these to bypass sandboxing
-from os import utime
-try:
- from os import mkdir, rename, unlink
- WRITE_SUPPORT = True
-except ImportError:
- # no write support, probably under GAE
- WRITE_SUPPORT = False
-
-from os import open as os_open
-from os.path import isdir, split
-
-try:
- import importlib.machinery as importlib_machinery
- # access attribute to force import under delayed import mechanisms.
- importlib_machinery.__name__
-except ImportError:
- importlib_machinery = None
-
-from pkg_resources.extern import packaging
-__import__('pkg_resources.extern.packaging.version')
-__import__('pkg_resources.extern.packaging.specifiers')
-__import__('pkg_resources.extern.packaging.requirements')
-__import__('pkg_resources.extern.packaging.markers')
-
-
-if (3, 0) < sys.version_info < (3, 3):
- msg = (
- "Support for Python 3.0-3.2 has been dropped. Future versions "
- "will fail here."
- )
- warnings.warn(msg)
-
-# declare some globals that will be defined later to
-# satisfy the linters.
-require = None
-working_set = None
-
-
-class PEP440Warning(RuntimeWarning):
- """
- Used when there is an issue with a version or specifier not complying with
- PEP 440.
- """
-
-
-class _SetuptoolsVersionMixin(object):
-
- def __hash__(self):
- return super(_SetuptoolsVersionMixin, self).__hash__()
-
- def __lt__(self, other):
- if isinstance(other, tuple):
- return tuple(self) < other
- else:
- return super(_SetuptoolsVersionMixin, self).__lt__(other)
-
- def __le__(self, other):
- if isinstance(other, tuple):
- return tuple(self) <= other
- else:
- return super(_SetuptoolsVersionMixin, self).__le__(other)
-
- def __eq__(self, other):
- if isinstance(other, tuple):
- return tuple(self) == other
- else:
- return super(_SetuptoolsVersionMixin, self).__eq__(other)
-
- def __ge__(self, other):
- if isinstance(other, tuple):
- return tuple(self) >= other
- else:
- return super(_SetuptoolsVersionMixin, self).__ge__(other)
-
- def __gt__(self, other):
- if isinstance(other, tuple):
- return tuple(self) > other
- else:
- return super(_SetuptoolsVersionMixin, self).__gt__(other)
-
- def __ne__(self, other):
- if isinstance(other, tuple):
- return tuple(self) != other
- else:
- return super(_SetuptoolsVersionMixin, self).__ne__(other)
-
- def __getitem__(self, key):
- return tuple(self)[key]
-
- def __iter__(self):
- component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
- replace = {
- 'pre': 'c',
- 'preview': 'c',
- '-': 'final-',
- 'rc': 'c',
- 'dev': '@',
- }.get
-
- def _parse_version_parts(s):
- for part in component_re.split(s):
- part = replace(part, part)
- if not part or part == '.':
- continue
- if part[:1] in '0123456789':
- # pad for numeric comparison
- yield part.zfill(8)
- else:
- yield '*'+part
-
- # ensure that alpha/beta/candidate are before final
- yield '*final'
-
- def old_parse_version(s):
- parts = []
- for part in _parse_version_parts(s.lower()):
- if part.startswith('*'):
- # remove '-' before a prerelease tag
- if part < '*final':
- while parts and parts[-1] == '*final-':
- parts.pop()
- # remove trailing zeros from each series of numeric parts
- while parts and parts[-1] == '00000000':
- parts.pop()
- parts.append(part)
- return tuple(parts)
-
- # Warn for use of this function
- warnings.warn(
- "You have iterated over the result of "
- "pkg_resources.parse_version. This is a legacy behavior which is "
- "inconsistent with the new version class introduced in setuptools "
- "8.0. In most cases, conversion to a tuple is unnecessary. For "
- "comparison of versions, sort the Version instances directly. If "
- "you have another use case requiring the tuple, please file a "
- "bug with the setuptools project describing that need.",
- RuntimeWarning,
- stacklevel=1,
- )
-
- for part in old_parse_version(str(self)):
- yield part
-
-
-class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
- pass
-
-
-class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
- packaging.version.LegacyVersion):
- pass
-
-
-def parse_version(v):
- try:
- return SetuptoolsVersion(v)
- except packaging.version.InvalidVersion:
- return SetuptoolsLegacyVersion(v)
-
-
-_state_vars = {}
-
-def _declare_state(vartype, **kw):
- globals().update(kw)
- _state_vars.update(dict.fromkeys(kw, vartype))
-
-def __getstate__():
- state = {}
- g = globals()
- for k, v in _state_vars.items():
- state[k] = g['_sget_'+v](g[k])
- return state
-
-def __setstate__(state):
- g = globals()
- for k, v in state.items():
- g['_sset_'+_state_vars[k]](k, g[k], v)
- return state
-
-def _sget_dict(val):
- return val.copy()
-
-def _sset_dict(key, ob, state):
- ob.clear()
- ob.update(state)
-
-def _sget_object(val):
- return val.__getstate__()
-
-def _sset_object(key, ob, state):
- ob.__setstate__(state)
-
-_sget_none = _sset_none = lambda *args: None
-
-
-def get_supported_platform():
- """Return this platform's maximum compatible version.
-
- distutils.util.get_platform() normally reports the minimum version
- of Mac OS X that would be required to *use* extensions produced by
- distutils. But what we want when checking compatibility is to know the
- version of Mac OS X that we are *running*. To allow usage of packages that
- explicitly require a newer version of Mac OS X, we must also know the
- current version of the OS.
-
- If this condition occurs for any other platform with a version in its
- platform strings, this function should be extended accordingly.
- """
- plat = get_build_platform()
- m = macosVersionString.match(plat)
- if m is not None and sys.platform == "darwin":
- try:
- plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
- except ValueError:
- # not Mac OS X
- pass
- return plat
-
-__all__ = [
- # Basic resource access and distribution/entry point discovery
- 'require', 'run_script', 'get_provider', 'get_distribution',
- 'load_entry_point', 'get_entry_map', 'get_entry_info',
- 'iter_entry_points',
- 'resource_string', 'resource_stream', 'resource_filename',
- 'resource_listdir', 'resource_exists', 'resource_isdir',
-
- # Environmental control
- 'declare_namespace', 'working_set', 'add_activation_listener',
- 'find_distributions', 'set_extraction_path', 'cleanup_resources',
- 'get_default_cache',
-
- # Primary implementation classes
- 'Environment', 'WorkingSet', 'ResourceManager',
- 'Distribution', 'Requirement', 'EntryPoint',
-
- # Exceptions
- 'ResolutionError', 'VersionConflict', 'DistributionNotFound',
- 'UnknownExtra', 'ExtractionError',
-
- # Warnings
- 'PEP440Warning',
-
- # Parsing functions and string utilities
- 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
- 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
- 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
-
- # filesystem utilities
- 'ensure_directory', 'normalize_path',
-
- # Distribution "precedence" constants
- 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
-
- # "Provider" interfaces, implementations, and registration/lookup APIs
- 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
- 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
- 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
- 'register_finder', 'register_namespace_handler', 'register_loader_type',
- 'fixup_namespace_packages', 'get_importer',
-
- # Deprecated/backward compatibility only
- 'run_main', 'AvailableDistributions',
-]
-
-class ResolutionError(Exception):
- """Abstract base for dependency resolution errors"""
- def __repr__(self):
- return self.__class__.__name__+repr(self.args)
-
-
-class VersionConflict(ResolutionError):
- """
- An already-installed version conflicts with the requested version.
-
- Should be initialized with the installed Distribution and the requested
- Requirement.
- """
-
- _template = "{self.dist} is installed but {self.req} is required"
-
- @property
- def dist(self):
- return self.args[0]
-
- @property
- def req(self):
- return self.args[1]
-
- def report(self):
- return self._template.format(**locals())
-
- def with_context(self, required_by):
- """
- If required_by is non-empty, return a version of self that is a
- ContextualVersionConflict.
- """
- if not required_by:
- return self
- args = self.args + (required_by,)
- return ContextualVersionConflict(*args)
-
-
-class ContextualVersionConflict(VersionConflict):
- """
- A VersionConflict that accepts a third parameter, the set of the
- requirements that required the installed Distribution.
- """
-
- _template = VersionConflict._template + ' by {self.required_by}'
-
- @property
- def required_by(self):
- return self.args[2]
-
-
-class DistributionNotFound(ResolutionError):
- """A requested distribution was not found"""
-
- _template = ("The '{self.req}' distribution was not found "
- "and is required by {self.requirers_str}")
-
- @property
- def req(self):
- return self.args[0]
-
- @property
- def requirers(self):
- return self.args[1]
-
- @property
- def requirers_str(self):
- if not self.requirers:
- return 'the application'
- return ', '.join(self.requirers)
-
- def report(self):
- return self._template.format(**locals())
-
- def __str__(self):
- return self.report()
-
-
-class UnknownExtra(ResolutionError):
- """Distribution doesn't have an "extra feature" of the given name"""
-_provider_factories = {}
-
-PY_MAJOR = sys.version[:3]
-EGG_DIST = 3
-BINARY_DIST = 2
-SOURCE_DIST = 1
-CHECKOUT_DIST = 0
-DEVELOP_DIST = -1
-
-def register_loader_type(loader_type, provider_factory):
- """Register `provider_factory` to make providers for `loader_type`
-
- `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
- and `provider_factory` is a function that, passed a *module* object,
- returns an ``IResourceProvider`` for that module.
- """
- _provider_factories[loader_type] = provider_factory
-
-def get_provider(moduleOrReq):
- """Return an IResourceProvider for the named module or requirement"""
- if isinstance(moduleOrReq, Requirement):
- return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
- try:
- module = sys.modules[moduleOrReq]
- except KeyError:
- __import__(moduleOrReq)
- module = sys.modules[moduleOrReq]
- loader = getattr(module, '__loader__', None)
- return _find_adapter(_provider_factories, loader)(module)
-
-def _macosx_vers(_cache=[]):
- if not _cache:
- version = platform.mac_ver()[0]
- # fallback for MacPorts
- if version == '':
- plist = '/System/Library/CoreServices/SystemVersion.plist'
- if os.path.exists(plist):
- if hasattr(plistlib, 'readPlist'):
- plist_content = plistlib.readPlist(plist)
- if 'ProductVersion' in plist_content:
- version = plist_content['ProductVersion']
-
- _cache.append(version.split('.'))
- return _cache[0]
-
-def _macosx_arch(machine):
- return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
-
-def get_build_platform():
- """Return this platform's string for platform-specific distributions
-
- XXX Currently this is the same as ``distutils.util.get_platform()``, but it
- needs some hacks for Linux and Mac OS X.
- """
- try:
- # Python 2.7 or >=3.2
- from sysconfig import get_platform
- except ImportError:
- from distutils.util import get_platform
-
- plat = get_platform()
- if sys.platform == "darwin" and not plat.startswith('macosx-'):
- try:
- version = _macosx_vers()
- machine = os.uname()[4].replace(" ", "_")
- return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
- _macosx_arch(machine))
- except ValueError:
- # if someone is running a non-Mac darwin system, this will fall
- # through to the default implementation
- pass
- return plat
-
-macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
-darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
-# XXX backward compat
-get_platform = get_build_platform
-
-
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # trivial cases: unknown platform on either side, or exact match
    if provided is None or required is None or provided == required:
        return True

    # Mac OS X special cases
    req_mac = re.match(r"macosx-(\d+)\.(\d+)-(.*)", required)
    if not req_mac:
        # XXX Linux and other platforms' special cases should go here
        return False

    prov_mac = re.match(r"macosx-(\d+)\.(\d+)-(.*)", provided)
    if not prov_mac:
        # Backwards compatibility for eggs built before setuptools 0.6,
        # which used darwin-kernel-version designations instead of macosx.
        prov_darwin = re.match(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)", provided)
        if prov_darwin:
            dversion = int(prov_darwin.group(1))
            macosversion = "%s.%s" % (req_mac.group(1), req_mac.group(2))
            darwin_ok = (
                (dversion == 7 and macosversion >= "10.3")
                or (dversion == 8 and macosversion >= "10.4")
            )
            if darwin_ok:
                return True
        # egg isn't macosx or legacy darwin
        return False

    # must share the same major OS version and machine architecture
    if prov_mac.group(1) != req_mac.group(1) \
            or prov_mac.group(3) != req_mac.group(3):
        return False

    # the required OS minor version must be >= the provided one
    return int(prov_mac.group(2)) <= int(req_mac.group(2))
-
-
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # The script runs in the *caller's* global namespace: wipe it clean
    # except for __name__, then hand it to the distribution's runner.
    caller_globals = sys._getframe(1).f_globals
    preserved_name = caller_globals['__name__']
    caller_globals.clear()
    caller_globals['__name__'] = preserved_name
    require(dist_spec)[0].run_script(script_name, caller_globals)

# backward compatibility: old public name for run_script
run_main = run_script
-
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Progressively coerce: string -> Requirement -> Distribution.
    spec = dist
    if isinstance(spec, six.string_types):
        spec = Requirement.parse(spec)
    if isinstance(spec, Requirement):
        spec = get_provider(spec)
    if isinstance(spec, Distribution):
        return spec
    raise TypeError("Expected string, Requirement, or Distribution", spec)
-
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
-
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
-
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
-
-
class IMetadataProvider:
    """Informal interface describing access to a distribution's metadata.

    Methods are declared here for documentation purposes only (no ``self``,
    no bodies); concrete providers such as ``NullProvider`` implement them.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
-
-
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    Informal interface: extends the metadata interface with access to the
    package's data files.  Resource names use '/'-separated paths relative
    to the package.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
-
-
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries, in order; may contain duplicates, mirroring sys.path
        self.entries = []
        # path entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> the active Distribution for that project
        self.by_key = {}
        # subscriber callables, invoked for every newly added distribution
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is conflicts with __requires__; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # the script executes in the caller's globals, reset to a bare module
        # namespace containing only __name__ (see module-level run_script)
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the dist's key under both the explicit entry and its location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers registered via subscribe()
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
            replace_conflicting=False):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        """

        # set up the stack (reversed so pop(0) visits in original order
        # as new dependencies are appended)
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req):
                # environment marker not satisfied for any demanding extra
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(self, plugin_env, full_env=None, installer=None,
            fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # resolve against a scratch copy so failures don't mutate self
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # fan a newly activated distribution out to all subscribers
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # shallow-copy all state so unpickling can't alias our internals
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
-
-
-class _ReqExtras(dict):
- """
- Map each requirement to the extras that demanded it.
- """
-
- def markers_pass(self, req):
- """
- Evaluate markers for req against each extra that
- demanded it.
-
- Return False if the req has a marker and fails
- evaluation. Otherwise, return True.
- """
- extra_evals = (
- req.marker.evaluate({'extra': extra})
- for extra in self.get(req, ()) + (None,)
- )
- return not req.marker or any(extra_evals)
-
-
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE: the `platform` and `python` defaults below are evaluated once,
    # at class-definition (import) time, per normal Python default-argument
    # semantics -- not per call.
    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # project key -> newest-first list of Distributions
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # raises KeyError/ValueError if dist was never added
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # keep each project's list sorted newest-first
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # unrestricted platform/python so nothing from either side is dropped
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
-
-
# XXX backward compatibility: legacy public name for Environment,
# kept so old code importing AvailableDistributions keeps working.
AvailableDistributions = Environment
-
-
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Raised by ``ResourceManager.extraction_error()``.  The following
    attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
-
-
class ResourceManager:
    """Manage resource extraction and packages"""

    # base path for extractions; None means "use get_default_cache()"
    extraction_path = None

    def __init__(self):
        # paths handed out by get_cache_path(), tracked for cleanup
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)

        Always raises ``ExtractionError``, wrapping the exception currently
        being handled (if any) with manager/cache-path context.
        """

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Fixed: was a bare `except:`, which also captured
            # KeyboardInterrupt/SystemExit and converted them into an
            # ExtractionError.  Only genuine errors should be wrapped.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        # remember what we handed out so cleanup_resources() can find it
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)

        Raises ValueError if any paths have already been handed out.
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented
-
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".

    Raises RuntimeError on Windows if no suitable environment variable
    combination can be found.
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    # XXX this may be locale-specific!
    app_data = 'Application Data'
    # Candidate (env-var combination, subdirectory) pairs, best first.
    app_homes = [
        # best option, should be locale-safe
        (('APPDATA',), None),
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        # 95/98/ME
        (('WINDIR',), app_data),
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                # this combination is incomplete; try the next one
                break
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    # Fixed typo in user-facing message: "enviroment" -> "environment".
    raise RuntimeError(
        "Please set the PYTHON_EGG_CACHE environment variable"
    )
-
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub(r"[^A-Za-z0-9.]+", "-", name)
-
-
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # PEP 440-compliant versions normalize cleanly
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # fall back: spaces become dots, other junk runs become dashes
        cleaned = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', cleaned)
-
-
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    # NOTE(review): the pattern preserves '.' even though the docstring
    # says "non-alphanumeric" -- behavior kept exactly as-is.
    sanitized = re.sub(r"[^A-Za-z0-9.]+", "_", extra)
    return sanitized.lower()
-
-
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.translate(str.maketrans('-', '_'))
-
-
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # scrub location attributes that are meaningless for marker text
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
-
-
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        # parse and evaluate in one step; both may raise InvalidMarker
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        raise SyntaxError(e)
-
-
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # set by EggProvider subclasses when an egg is detected; None otherwise
    egg_name = None
    egg_info = None
    # the module's __loader__, used for get_data()-based reads
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # wrap the raw bytes in a file-like object
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    # On Python 2 (version_info <= (3,)), _get() data is a native str and
    # needs no decoding; on Python 3 the raw bytes must be decoded as UTF-8.
    if sys.version_info <= (3,):
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name))
    else:
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name)).decode("utf-8")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # scripts are stored as metadata under "scripts/<name>"
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # normalize all line endings to '\n' before compiling
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # real file on disk: execute its current contents
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # no real file (e.g. zipped egg): seed linecache so tracebacks
            # can still show source lines for the virtual filename
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename,'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir/_get primitives below must be supplied by
    # subclasses registered for a concrete loader type.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # join a '/'-separated resource name onto a filesystem base path
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
-
-register_loader_type(object, NullProvider)
-
-
-class EggProvider(NullProvider):
- """Provider based on a virtual filesystem"""
-
- def __init__(self, module):
- NullProvider.__init__(self, module)
- self._setup_prefix()
-
- def _setup_prefix(self):
- # we assume here that our metadata may be nested inside a "basket"
- # of multiple eggs; that's why we use module_path instead of .archive
- path = self.module_path
- old = None
- while path!=old:
- if _is_unpacked_egg(path):
- self.egg_name = os.path.basename(path)
- self.egg_info = os.path.join(path, 'EGG-INFO')
- self.egg_root = path
- break
- old = path
- path, base = os.path.split(path)
-
-class DefaultProvider(EggProvider):
- """Provides access to package resources in the filesystem"""
-
- def _has(self, path):
- return os.path.exists(path)
-
- def _isdir(self, path):
- return os.path.isdir(path)
-
- def _listdir(self, path):
- return os.listdir(path)
-
- def get_resource_stream(self, manager, resource_name):
- return open(self._fn(self.module_path, resource_name), 'rb')
-
- def _get(self, path):
- with open(path, 'rb') as stream:
- return stream.read()
-
- @classmethod
- def _register(cls):
- loader_cls = getattr(importlib_machinery, 'SourceFileLoader',
- type(None))
- register_loader_type(loader_cls, cls)
-
-DefaultProvider._register()
-
-
-class EmptyProvider(NullProvider):
- """Provider that returns nothing for all requests"""
-
- _isdir = _has = lambda self, path: False
- _get = lambda self, path: ''
- _listdir = lambda self, path: []
- module_path = None
-
- def __init__(self):
- pass
-
-empty_provider = EmptyProvider()
-
-
-class ZipManifests(dict):
- """
- zip manifest builder
- """
-
- @classmethod
- def build(cls, path):
- """
- Build a dictionary similar to the zipimport directory
- caches, except instead of tuples, store ZipInfo objects.
-
- Use a platform-specific path separator (os.sep) for the path keys
- for compatibility with pypy on Windows.
- """
- with ContextualZipFile(path) as zfile:
- items = (
- (
- name.replace('/', os.sep),
- zfile.getinfo(name),
- )
- for name in zfile.namelist()
- )
- return dict(items)
-
- load = build
-
-
-class MemoizedZipManifests(ZipManifests):
- """
- Memoized zipfile manifests.
- """
- manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
-
- def load(self, path):
- """
- Load a manifest at path or return a suitable manifest already loaded.
- """
- path = os.path.normpath(path)
- mtime = os.stat(path).st_mtime
-
- if path not in self or self[path].mtime != mtime:
- manifest = self.build(path)
- self[path] = self.manifest_mod(manifest, mtime)
-
- return self[path].manifest
-
-
-class ContextualZipFile(zipfile.ZipFile):
- """
- Supplement ZipFile class to support context manager for Python 2.6
- """
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.close()
-
- def __new__(cls, *args, **kwargs):
- """
- Construct a ZipFile or ContextualZipFile as appropriate
- """
- if hasattr(zipfile.ZipFile, '__exit__'):
- return zipfile.ZipFile(*args, **kwargs)
- return super(ContextualZipFile, cls).__new__(cls)
-
-
-class ZipProvider(EggProvider):
- """Resource support for zips and eggs"""
-
- eagers = None
- _zip_manifests = MemoizedZipManifests()
-
- def __init__(self, module):
- EggProvider.__init__(self, module)
- self.zip_pre = self.loader.archive+os.sep
-
- def _zipinfo_name(self, fspath):
- # Convert a virtual filename (full path to file) into a zipfile subpath
- # usable with the zipimport directory cache for our target archive
- if fspath.startswith(self.zip_pre):
- return fspath[len(self.zip_pre):]
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath, self.zip_pre)
- )
-
- def _parts(self, zip_path):
- # Convert a zipfile subpath into an egg-relative path part list.
- # pseudo-fs path
- fspath = self.zip_pre+zip_path
- if fspath.startswith(self.egg_root+os.sep):
- return fspath[len(self.egg_root)+1:].split(os.sep)
- raise AssertionError(
- "%s is not a subpath of %s" % (fspath, self.egg_root)
- )
-
- @property
- def zipinfo(self):
- return self._zip_manifests.load(self.loader.archive)
-
- def get_resource_filename(self, manager, resource_name):
- if not self.egg_name:
- raise NotImplementedError(
- "resource_filename() only supported for .egg, not .zip"
- )
- # no need to lock for extraction, since we use temp names
- zip_path = self._resource_to_zip(resource_name)
- eagers = self._get_eager_resources()
- if '/'.join(self._parts(zip_path)) in eagers:
- for name in eagers:
- self._extract_resource(manager, self._eager_to_zip(name))
- return self._extract_resource(manager, zip_path)
-
- @staticmethod
- def _get_date_and_size(zip_stat):
- size = zip_stat.file_size
- # ymdhms+wday, yday, dst
- date_time = zip_stat.date_time + (0, 0, -1)
- # 1980 offset already done
- timestamp = time.mktime(date_time)
- return timestamp, size
-
- def _extract_resource(self, manager, zip_path):
-
- if zip_path in self._index():
- for name in self._index()[zip_path]:
- last = self._extract_resource(
- manager, os.path.join(zip_path, name)
- )
- # return the extracted directory name
- return os.path.dirname(last)
-
- timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
-
- if not WRITE_SUPPORT:
- raise IOError('"os.rename" and "os.unlink" are not supported '
- 'on this platform')
- try:
-
- real_path = manager.get_cache_path(
- self.egg_name, self._parts(zip_path)
- )
-
- if self._is_current(real_path, zip_path):
- return real_path
-
- outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
- os.write(outf, self.loader.get_data(zip_path))
- os.close(outf)
- utime(tmpnam, (timestamp, timestamp))
- manager.postprocess(tmpnam, real_path)
-
- try:
- rename(tmpnam, real_path)
-
- except os.error:
- if os.path.isfile(real_path):
- if self._is_current(real_path, zip_path):
- # the file became current since it was checked above,
- # so proceed.
- return real_path
- # Windows, del old file and retry
- elif os.name=='nt':
- unlink(real_path)
- rename(tmpnam, real_path)
- return real_path
- raise
-
- except os.error:
- # report a user-friendly error
- manager.extraction_error()
-
- return real_path
-
- def _is_current(self, file_path, zip_path):
- """
- Return True if the file_path is current for this zip_path
- """
- timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
- if not os.path.isfile(file_path):
- return False
- stat = os.stat(file_path)
- if stat.st_size!=size or stat.st_mtime!=timestamp:
- return False
- # check that the contents match
- zip_contents = self.loader.get_data(zip_path)
- with open(file_path, 'rb') as f:
- file_contents = f.read()
- return zip_contents == file_contents
-
- def _get_eager_resources(self):
- if self.eagers is None:
- eagers = []
- for name in ('native_libs.txt', 'eager_resources.txt'):
- if self.has_metadata(name):
- eagers.extend(self.get_metadata_lines(name))
- self.eagers = eagers
- return self.eagers
-
- def _index(self):
- try:
- return self._dirindex
- except AttributeError:
- ind = {}
- for path in self.zipinfo:
- parts = path.split(os.sep)
- while parts:
- parent = os.sep.join(parts[:-1])
- if parent in ind:
- ind[parent].append(parts[-1])
- break
- else:
- ind[parent] = [parts.pop()]
- self._dirindex = ind
- return ind
-
- def _has(self, fspath):
- zip_path = self._zipinfo_name(fspath)
- return zip_path in self.zipinfo or zip_path in self._index()
-
- def _isdir(self, fspath):
- return self._zipinfo_name(fspath) in self._index()
-
- def _listdir(self, fspath):
- return list(self._index().get(self._zipinfo_name(fspath), ()))
-
- def _eager_to_zip(self, resource_name):
- return self._zipinfo_name(self._fn(self.egg_root, resource_name))
-
- def _resource_to_zip(self, resource_name):
- return self._zipinfo_name(self._fn(self.module_path, resource_name))
-
-register_loader_type(zipimport.zipimporter, ZipProvider)
-
-
-class FileMetadata(EmptyProvider):
- """Metadata handler for standalone PKG-INFO files
-
- Usage::
-
- metadata = FileMetadata("/path/to/PKG-INFO")
-
- This provider rejects all data and metadata requests except for PKG-INFO,
- which is treated as existing, and will be the contents of the file at
- the provided location.
- """
-
- def __init__(self, path):
- self.path = path
-
- def has_metadata(self, name):
- return name=='PKG-INFO' and os.path.isfile(self.path)
-
- def get_metadata(self, name):
- if name=='PKG-INFO':
- with io.open(self.path, encoding='utf-8') as f:
- try:
- metadata = f.read()
- except UnicodeDecodeError as exc:
- # add path context to error message
- tmpl = " in {self.path}"
- exc.reason += tmpl.format(self=self)
- raise
- return metadata
- raise KeyError("No metadata except PKG-INFO is available")
-
- def get_metadata_lines(self, name):
- return yield_lines(self.get_metadata(name))
-
-
-class PathMetadata(DefaultProvider):
- """Metadata provider for egg directories
-
- Usage::
-
- # Development eggs:
-
- egg_info = "/path/to/PackageName.egg-info"
- base_dir = os.path.dirname(egg_info)
- metadata = PathMetadata(base_dir, egg_info)
- dist_name = os.path.splitext(os.path.basename(egg_info))[0]
- dist = Distribution(basedir, project_name=dist_name, metadata=metadata)
-
- # Unpacked egg directories:
-
- egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
- metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
- dist = Distribution.from_filename(egg_path, metadata=metadata)
- """
-
- def __init__(self, path, egg_info):
- self.module_path = path
- self.egg_info = egg_info
-
-
-class EggMetadata(ZipProvider):
- """Metadata provider for .egg files"""
-
- def __init__(self, importer):
- """Create a metadata provider from a zipimporter"""
-
- self.zip_pre = importer.archive+os.sep
- self.loader = importer
- if importer.prefix:
- self.module_path = os.path.join(importer.archive, importer.prefix)
- else:
- self.module_path = importer.archive
- self._setup_prefix()
-
-_declare_state('dict', _distribution_finders = {})
-
-def register_finder(importer_type, distribution_finder):
- """Register `distribution_finder` to find distributions in sys.path items
-
- `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
- handler), and `distribution_finder` is a callable that, passed a path
- item and the importer instance, yields ``Distribution`` instances found on
- that path item. See ``pkg_resources.find_on_path`` for an example."""
- _distribution_finders[importer_type] = distribution_finder
-
-
-def find_distributions(path_item, only=False):
- """Yield distributions accessible via `path_item`"""
- importer = get_importer(path_item)
- finder = _find_adapter(_distribution_finders, importer)
- return finder(importer, path_item, only)
-
-def find_eggs_in_zip(importer, path_item, only=False):
- """
- Find eggs in zip files; possibly multiple nested eggs.
- """
- if importer.archive.endswith('.whl'):
- # wheels are not supported with this finder
- # they don't have PKG-INFO metadata, and won't ever contain eggs
- return
- metadata = EggMetadata(importer)
- if metadata.has_metadata('PKG-INFO'):
- yield Distribution.from_filename(path_item, metadata=metadata)
- if only:
- # don't yield nested distros
- return
- for subitem in metadata.resource_listdir('/'):
- if _is_unpacked_egg(subitem):
- subpath = os.path.join(path_item, subitem)
- for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
- yield dist
-
-register_finder(zipimport.zipimporter, find_eggs_in_zip)
-
-def find_nothing(importer, path_item, only=False):
- return ()
-register_finder(object, find_nothing)
-
-def find_on_path(importer, path_item, only=False):
- """Yield distributions accessible on a sys.path directory"""
- path_item = _normalize_cached(path_item)
-
- if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
- if _is_unpacked_egg(path_item):
- yield Distribution.from_filename(
- path_item, metadata=PathMetadata(
- path_item, os.path.join(path_item,'EGG-INFO')
- )
- )
- else:
- # scan for .egg and .egg-info in directory
- for entry in os.listdir(path_item):
- lower = entry.lower()
- if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
- fullpath = os.path.join(path_item, entry)
- if os.path.isdir(fullpath):
- # egg-info directory, allow getting metadata
- metadata = PathMetadata(path_item, fullpath)
- else:
- metadata = FileMetadata(fullpath)
- yield Distribution.from_location(
- path_item, entry, metadata, precedence=DEVELOP_DIST
- )
- elif not only and _is_unpacked_egg(entry):
- dists = find_distributions(os.path.join(path_item, entry))
- for dist in dists:
- yield dist
- elif not only and lower.endswith('.egg-link'):
- with open(os.path.join(path_item, entry)) as entry_file:
- entry_lines = entry_file.readlines()
- for line in entry_lines:
- if not line.strip():
- continue
- path = os.path.join(path_item, line.rstrip())
- dists = find_distributions(path)
- for item in dists:
- yield item
- break
-register_finder(pkgutil.ImpImporter, find_on_path)
-
-if hasattr(importlib_machinery, 'FileFinder'):
- register_finder(importlib_machinery.FileFinder, find_on_path)
-
-_declare_state('dict', _namespace_handlers={})
-_declare_state('dict', _namespace_packages={})
-
-
-def register_namespace_handler(importer_type, namespace_handler):
- """Register `namespace_handler` to declare namespace packages
-
- `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
- handler), and `namespace_handler` is a callable like this::
-
- def namespace_handler(importer, path_entry, moduleName, module):
- # return a path_entry to use for child packages
-
- Namespace handlers are only called if the importer object has already
- agreed that it can handle the relevant path item, and they should only
- return a subpath if the module __path__ does not already contain an
- equivalent subpath. For an example namespace handler, see
- ``pkg_resources.file_ns_handler``.
- """
- _namespace_handlers[importer_type] = namespace_handler
-
-def _handle_ns(packageName, path_item):
- """Ensure that named package includes a subpath of path_item (if needed)"""
-
- importer = get_importer(path_item)
- if importer is None:
- return None
- loader = importer.find_module(packageName)
- if loader is None:
- return None
- module = sys.modules.get(packageName)
- if module is None:
- module = sys.modules[packageName] = types.ModuleType(packageName)
- module.__path__ = []
- _set_parent_ns(packageName)
- elif not hasattr(module,'__path__'):
- raise TypeError("Not a package:", packageName)
- handler = _find_adapter(_namespace_handlers, importer)
- subpath = handler(importer, path_item, packageName, module)
- if subpath is not None:
- path = module.__path__
- path.append(subpath)
- loader.load_module(packageName)
- _rebuild_mod_path(path, packageName, module)
- return subpath
-
-
-def _rebuild_mod_path(orig_path, package_name, module):
- """
- Rebuild module.__path__ ensuring that all entries are ordered
- corresponding to their sys.path order
- """
- sys_path = [_normalize_cached(p) for p in sys.path]
- def position_in_sys_path(path):
- """
- Return the ordinal of the path based on its position in sys.path
- """
- path_parts = path.split(os.sep)
- module_parts = package_name.count('.') + 1
- parts = path_parts[:-module_parts]
- return sys_path.index(_normalize_cached(os.sep.join(parts)))
-
- orig_path.sort(key=position_in_sys_path)
- module.__path__[:] = [_normalize_cached(p) for p in orig_path]
-
-
-def declare_namespace(packageName):
- """Declare that package 'packageName' is a namespace package"""
-
- _imp.acquire_lock()
- try:
- if packageName in _namespace_packages:
- return
-
- path, parent = sys.path, None
- if '.' in packageName:
- parent = '.'.join(packageName.split('.')[:-1])
- declare_namespace(parent)
- if parent not in _namespace_packages:
- __import__(parent)
- try:
- path = sys.modules[parent].__path__
- except AttributeError:
- raise TypeError("Not a package:", parent)
-
- # Track what packages are namespaces, so when new path items are added,
- # they can be updated
- _namespace_packages.setdefault(parent,[]).append(packageName)
- _namespace_packages.setdefault(packageName,[])
-
- for path_item in path:
- # Ensure all the parent's path items are reflected in the child,
- # if they apply
- _handle_ns(packageName, path_item)
-
- finally:
- _imp.release_lock()
-
-def fixup_namespace_packages(path_item, parent=None):
- """Ensure that previously-declared namespace packages include path_item"""
- _imp.acquire_lock()
- try:
- for package in _namespace_packages.get(parent,()):
- subpath = _handle_ns(package, path_item)
- if subpath:
- fixup_namespace_packages(subpath, package)
- finally:
- _imp.release_lock()
-
-def file_ns_handler(importer, path_item, packageName, module):
- """Compute an ns-package subpath for a filesystem or zipfile importer"""
-
- subpath = os.path.join(path_item, packageName.split('.')[-1])
- normalized = _normalize_cached(subpath)
- for item in module.__path__:
- if _normalize_cached(item)==normalized:
- break
- else:
- # Only return the path if it's not already there
- return subpath
-
-register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
-register_namespace_handler(zipimport.zipimporter, file_ns_handler)
-
-if hasattr(importlib_machinery, 'FileFinder'):
- register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
-
-
-def null_ns_handler(importer, path_item, packageName, module):
- return None
-
-register_namespace_handler(object, null_ns_handler)
-
-
-def normalize_path(filename):
- """Normalize a file/dir name for comparison purposes"""
- return os.path.normcase(os.path.realpath(filename))
-
-def _normalize_cached(filename, _cache={}):
- try:
- return _cache[filename]
- except KeyError:
- _cache[filename] = result = normalize_path(filename)
- return result
-
-def _is_unpacked_egg(path):
- """
- Determine if given path appears to be an unpacked egg.
- """
- return (
- path.lower().endswith('.egg')
- )
-
-def _set_parent_ns(packageName):
- parts = packageName.split('.')
- name = parts.pop()
- if parts:
- parent = '.'.join(parts)
- setattr(sys.modules[parent], name, sys.modules[packageName])
-
-
-def yield_lines(strs):
- """Yield non-empty/non-comment lines of a string or sequence"""
- if isinstance(strs, six.string_types):
- for s in strs.splitlines():
- s = s.strip()
- # skip blank lines/comments
- if s and not s.startswith('#'):
- yield s
- else:
- for ss in strs:
- for s in yield_lines(ss):
- yield s
-
-MODULE = re.compile(r"\w+(\.\w+)*$").match
-EGG_NAME = re.compile(
- r"""
- (?P<name>[^-]+) (
- -(?P<ver>[^-]+) (
- -py(?P<pyver>[^-]+) (
- -(?P<plat>.+)
- )?
- )?
- )?
- """,
- re.VERBOSE | re.IGNORECASE,
-).match
-
-
-class EntryPoint(object):
- """Object representing an advertised importable object"""
-
- def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
- if not MODULE(module_name):
- raise ValueError("Invalid module name", module_name)
- self.name = name
- self.module_name = module_name
- self.attrs = tuple(attrs)
- self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
- self.dist = dist
-
- def __str__(self):
- s = "%s = %s" % (self.name, self.module_name)
- if self.attrs:
- s += ':' + '.'.join(self.attrs)
- if self.extras:
- s += ' [%s]' % ','.join(self.extras)
- return s
-
- def __repr__(self):
- return "EntryPoint.parse(%r)" % str(self)
-
- def load(self, require=True, *args, **kwargs):
- """
- Require packages for this EntryPoint, then resolve it.
- """
- if not require or args or kwargs:
- warnings.warn(
- "Parameters to load are deprecated. Call .resolve and "
- ".require separately.",
- DeprecationWarning,
- stacklevel=2,
- )
- if require:
- self.require(*args, **kwargs)
- return self.resolve()
-
- def resolve(self):
- """
- Resolve the entry point from its module and attrs.
- """
- module = __import__(self.module_name, fromlist=['__name__'], level=0)
- try:
- return functools.reduce(getattr, self.attrs, module)
- except AttributeError as exc:
- raise ImportError(str(exc))
-
- def require(self, env=None, installer=None):
- if self.extras and not self.dist:
- raise UnknownExtra("Can't require() without a distribution", self)
- reqs = self.dist.requires(self.extras)
- items = working_set.resolve(reqs, env, installer)
- list(map(working_set.add, items))
-
- pattern = re.compile(
- r'\s*'
- r'(?P<name>.+?)\s*'
- r'=\s*'
- r'(?P<module>[\w.]+)\s*'
- r'(:\s*(?P<attr>[\w.]+))?\s*'
- r'(?P<extras>\[.*\])?\s*$'
- )
-
- @classmethod
- def parse(cls, src, dist=None):
- """Parse a single entry point from string `src`
-
- Entry point syntax follows the form::
-
- name = some.module:some.attr [extra1, extra2]
-
- The entry name and module name are required, but the ``:attrs`` and
- ``[extras]`` parts are optional
- """
- m = cls.pattern.match(src)
- if not m:
- msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
- raise ValueError(msg, src)
- res = m.groupdict()
- extras = cls._parse_extras(res['extras'])
- attrs = res['attr'].split('.') if res['attr'] else ()
- return cls(res['name'], res['module'], attrs, extras, dist)
-
- @classmethod
- def _parse_extras(cls, extras_spec):
- if not extras_spec:
- return ()
- req = Requirement.parse('x' + extras_spec)
- if req.specs:
- raise ValueError()
- return req.extras
-
- @classmethod
- def parse_group(cls, group, lines, dist=None):
- """Parse an entry point group"""
- if not MODULE(group):
- raise ValueError("Invalid group name", group)
- this = {}
- for line in yield_lines(lines):
- ep = cls.parse(line, dist)
- if ep.name in this:
- raise ValueError("Duplicate entry point", group, ep.name)
- this[ep.name]=ep
- return this
-
- @classmethod
- def parse_map(cls, data, dist=None):
- """Parse a map of entry point groups"""
- if isinstance(data, dict):
- data = data.items()
- else:
- data = split_sections(data)
- maps = {}
- for group, lines in data:
- if group is None:
- if not lines:
- continue
- raise ValueError("Entry points must be listed in groups")
- group = group.strip()
- if group in maps:
- raise ValueError("Duplicate group name", group)
- maps[group] = cls.parse_group(group, lines, dist)
- return maps
-
-
-def _remove_md5_fragment(location):
- if not location:
- return ''
- parsed = urllib.parse.urlparse(location)
- if parsed[-1].startswith('md5='):
- return urllib.parse.urlunparse(parsed[:-1] + ('',))
- return location
-
-
-def _version_from_file(lines):
- """
- Given an iterable of lines from a Metadata file, return
- the value of the Version field, if present, or None otherwise.
- """
- is_version_line = lambda line: line.lower().startswith('version:')
- version_lines = filter(is_version_line, lines)
- line = next(iter(version_lines), '')
- _, _, value = line.partition(':')
- return safe_version(value.strip()) or None
-
-
-class Distribution(object):
- """Wrap an actual or potential sys.path entry w/metadata"""
- PKG_INFO = 'PKG-INFO'
-
- def __init__(self, location=None, metadata=None, project_name=None,
- version=None, py_version=PY_MAJOR, platform=None,
- precedence=EGG_DIST):
- self.project_name = safe_name(project_name or 'Unknown')
- if version is not None:
- self._version = safe_version(version)
- self.py_version = py_version
- self.platform = platform
- self.location = location
- self.precedence = precedence
- self._provider = metadata or empty_provider
-
- @classmethod
- def from_location(cls, location, basename, metadata=None, **kw):
- project_name, version, py_version, platform = [None]*4
- basename, ext = os.path.splitext(basename)
- if ext.lower() in _distributionImpl:
- cls = _distributionImpl[ext.lower()]
-
- match = EGG_NAME(basename)
- if match:
- project_name, version, py_version, platform = match.group(
- 'name', 'ver', 'pyver', 'plat'
- )
- return cls(
- location, metadata, project_name=project_name, version=version,
- py_version=py_version, platform=platform, **kw
- )._reload_version()
-
- def _reload_version(self):
- return self
-
- @property
- def hashcmp(self):
- return (
- self.parsed_version,
- self.precedence,
- self.key,
- _remove_md5_fragment(self.location),
- self.py_version or '',
- self.platform or '',
- )
-
- def __hash__(self):
- return hash(self.hashcmp)
-
- def __lt__(self, other):
- return self.hashcmp < other.hashcmp
-
- def __le__(self, other):
- return self.hashcmp <= other.hashcmp
-
- def __gt__(self, other):
- return self.hashcmp > other.hashcmp
-
- def __ge__(self, other):
- return self.hashcmp >= other.hashcmp
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- # It's not a Distribution, so they are not equal
- return False
- return self.hashcmp == other.hashcmp
-
- def __ne__(self, other):
- return not self == other
-
- # These properties have to be lazy so that we don't have to load any
- # metadata until/unless it's actually needed. (i.e., some distributions
- # may not know their name or version without loading PKG-INFO)
-
- @property
- def key(self):
- try:
- return self._key
- except AttributeError:
- self._key = key = self.project_name.lower()
- return key
-
- @property
- def parsed_version(self):
- if not hasattr(self, "_parsed_version"):
- self._parsed_version = parse_version(self.version)
-
- return self._parsed_version
-
- def _warn_legacy_version(self):
- LV = packaging.version.LegacyVersion
- is_legacy = isinstance(self._parsed_version, LV)
- if not is_legacy:
- return
-
- # While an empty version is technically a legacy version and
- # is not a valid PEP 440 version, it's also unlikely to
- # actually come from someone and instead it is more likely that
- # it comes from setuptools attempting to parse a filename and
- # including it in the list. So for that we'll gate this warning
- # on if the version is anything at all or not.
- if not self.version:
- return
-
- tmpl = textwrap.dedent("""
- '{project_name} ({version})' is being parsed as a legacy,
- non PEP 440,
- version. You may find odd behavior and sort order.
- In particular it will be sorted as less than 0.0. It
- is recommended to migrate to PEP 440 compatible
- versions.
- """).strip().replace('\n', ' ')
-
- warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
-
- @property
- def version(self):
- try:
- return self._version
- except AttributeError:
- version = _version_from_file(self._get_metadata(self.PKG_INFO))
- if version is None:
- tmpl = "Missing 'Version:' header and/or %s file"
- raise ValueError(tmpl % self.PKG_INFO, self)
- return version
-
- @property
- def _dep_map(self):
- try:
- return self.__dep_map
- except AttributeError:
- dm = self.__dep_map = {None: []}
- for name in 'requires.txt', 'depends.txt':
- for extra, reqs in split_sections(self._get_metadata(name)):
- if extra:
- if ':' in extra:
- extra, marker = extra.split(':', 1)
- if invalid_marker(marker):
- # XXX warn
- reqs=[]
- elif not evaluate_marker(marker):
- reqs=[]
- extra = safe_extra(extra) or None
- dm.setdefault(extra,[]).extend(parse_requirements(reqs))
- return dm
-
- def requires(self, extras=()):
- """List of Requirements needed for this distro if `extras` are used"""
- dm = self._dep_map
- deps = []
- deps.extend(dm.get(None, ()))
- for ext in extras:
- try:
- deps.extend(dm[safe_extra(ext)])
- except KeyError:
- raise UnknownExtra(
- "%s has no such extra feature %r" % (self, ext)
- )
- return deps
-
- def _get_metadata(self, name):
- if self.has_metadata(name):
- for line in self.get_metadata_lines(name):
- yield line
-
- def activate(self, path=None):
- """Ensure distribution is importable on `path` (default=sys.path)"""
- if path is None:
- path = sys.path
- self.insert_on(path, replace=True)
- if path is sys.path:
- fixup_namespace_packages(self.location)
- for pkg in self._get_metadata('namespace_packages.txt'):
- if pkg in sys.modules:
- declare_namespace(pkg)
-
- def egg_name(self):
- """Return what this distribution's standard .egg filename should be"""
- filename = "%s-%s-py%s" % (
- to_filename(self.project_name), to_filename(self.version),
- self.py_version or PY_MAJOR
- )
-
- if self.platform:
- filename += '-' + self.platform
- return filename
-
- def __repr__(self):
- if self.location:
- return "%s (%s)" % (self, self.location)
- else:
- return str(self)
-
- def __str__(self):
- try:
- version = getattr(self, 'version', None)
- except ValueError:
- version = None
- version = version or "[unknown version]"
- return "%s %s" % (self.project_name, version)
-
- def __getattr__(self, attr):
- """Delegate all unrecognized public attributes to .metadata provider"""
- if attr.startswith('_'):
- raise AttributeError(attr)
- return getattr(self._provider, attr)
-
- @classmethod
- def from_filename(cls, filename, metadata=None, **kw):
- return cls.from_location(
- _normalize_cached(filename), os.path.basename(filename), metadata,
- **kw
- )
-
- def as_requirement(self):
- """Return a ``Requirement`` that matches this distribution exactly"""
- if isinstance(self.parsed_version, packaging.version.Version):
- spec = "%s==%s" % (self.project_name, self.parsed_version)
- else:
- spec = "%s===%s" % (self.project_name, self.parsed_version)
-
- return Requirement.parse(spec)
-
- def load_entry_point(self, group, name):
- """Return the `name` entry point of `group` or raise ImportError"""
- ep = self.get_entry_info(group, name)
- if ep is None:
- raise ImportError("Entry point %r not found" % ((group, name),))
- return ep.load()
-
- def get_entry_map(self, group=None):
- """Return the entry point map for `group`, or the full entry map"""
- try:
- ep_map = self._ep_map
- except AttributeError:
- ep_map = self._ep_map = EntryPoint.parse_map(
- self._get_metadata('entry_points.txt'), self
- )
- if group is not None:
- return ep_map.get(group,{})
- return ep_map
-
- def get_entry_info(self, group, name):
- """Return the EntryPoint object for `group`+`name`, or ``None``"""
- return self.get_entry_map(group).get(name)
-
- def insert_on(self, path, loc=None, replace=False):
- """Insert self.location in path before its nearest parent directory"""
-
- loc = loc or self.location
- if not loc:
- return
-
- nloc = _normalize_cached(loc)
- bdir = os.path.dirname(nloc)
- npath= [(p and _normalize_cached(p) or p) for p in path]
-
- for p, item in enumerate(npath):
- if item == nloc:
- break
- elif item == bdir and self.precedence == EGG_DIST:
- # if it's an .egg, give it precedence over its directory
- if path is sys.path:
- self.check_version_conflict()
- path.insert(p, loc)
- npath.insert(p, nloc)
- break
- else:
- if path is sys.path:
- self.check_version_conflict()
- if replace:
- path.insert(0, loc)
- else:
- path.append(loc)
- return
-
- # p is the spot where we found or inserted loc; now remove duplicates
- while True:
- try:
- np = npath.index(nloc, p+1)
- except ValueError:
- break
- else:
- del npath[np], path[np]
- # ha!
- p = np
-
- return
-
- def check_version_conflict(self):
- if self.key == 'setuptools':
- # ignore the inevitable setuptools self-conflicts :(
- return
-
- nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
- loc = normalize_path(self.location)
- for modname in self._get_metadata('top_level.txt'):
- if (modname not in sys.modules or modname in nsp
- or modname in _namespace_packages):
- continue
- if modname in ('pkg_resources', 'setuptools', 'site'):
- continue
- fn = getattr(sys.modules[modname], '__file__', None)
- if fn and (normalize_path(fn).startswith(loc) or
- fn.startswith(self.location)):
- continue
- issue_warning(
- "Module %s was already imported from %s, but %s is being added"
- " to sys.path" % (modname, fn, self.location),
- )
-
- def has_version(self):
- try:
- self.version
- except ValueError:
- issue_warning("Unbuilt egg for " + repr(self))
- return False
- return True
-
- def clone(self,**kw):
- """Copy this distribution, substituting in any changed keyword args"""
- names = 'project_name version py_version platform location precedence'
- for attr in names.split():
- kw.setdefault(attr, getattr(self, attr, None))
- kw.setdefault('metadata', self._provider)
- return self.__class__(**kw)
-
- @property
- def extras(self):
- return [dep for dep in self._dep_map if dep]
-
-
-class EggInfoDistribution(Distribution):
-
- def _reload_version(self):
- """
- Packages installed by distutils (e.g. numpy or scipy),
- which uses an old safe_version, and so
- their version numbers can get mangled when
- converted to filenames (e.g., 1.11.0.dev0+2329eae to
- 1.11.0.dev0_2329eae). These distributions will not be
- parsed properly
- downstream by Distribution and safe_version, so
- take an extra step and try to get the version number from
- the metadata file itself instead of the filename.
- """
- md_version = _version_from_file(self._get_metadata(self.PKG_INFO))
- if md_version:
- self._version = md_version
- return self
-
-
-class DistInfoDistribution(Distribution):
- """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
- PKG_INFO = 'METADATA'
- EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
-
- @property
- def _parsed_pkg_info(self):
- """Parse and cache metadata"""
- try:
- return self._pkg_info
- except AttributeError:
- metadata = self.get_metadata(self.PKG_INFO)
- self._pkg_info = email.parser.Parser().parsestr(metadata)
- return self._pkg_info
-
- @property
- def _dep_map(self):
- try:
- return self.__dep_map
- except AttributeError:
- self.__dep_map = self._compute_dependencies()
- return self.__dep_map
-
- def _compute_dependencies(self):
- """Recompute this distribution's dependencies."""
- dm = self.__dep_map = {None: []}
-
- reqs = []
- # Including any condition expressions
- for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
- reqs.extend(parse_requirements(req))
-
- def reqs_for_extra(extra):
- for req in reqs:
- if not req.marker or req.marker.evaluate({'extra': extra}):
- yield req
-
- common = frozenset(reqs_for_extra(None))
- dm[None].extend(common)
-
- for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
- extra = safe_extra(extra.strip())
- dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
-
- return dm
-
-
-_distributionImpl = {
- '.egg': Distribution,
- '.egg-info': EggInfoDistribution,
- '.dist-info': DistInfoDistribution,
- }
-
-
-def issue_warning(*args,**kw):
- level = 1
- g = globals()
- try:
- # find the first stack frame that is *not* code in
- # the pkg_resources module, to use for the warning
- while sys._getframe(level).f_globals is g:
- level += 1
- except ValueError:
- pass
- warnings.warn(stacklevel=level + 1, *args, **kw)
-
-
-class RequirementParseError(ValueError):
- def __str__(self):
- return ' '.join(self.args)
-
-
-def parse_requirements(strs):
- """Yield ``Requirement`` objects for each specification in `strs`
-
- `strs` must be a string, or a (possibly-nested) iterable thereof.
- """
- # create a steppable iterator, so we can handle \-continuations
- lines = iter(yield_lines(strs))
-
- for line in lines:
- # Drop comments -- a hash without a space may be in a URL.
- if ' #' in line:
- line = line[:line.find(' #')]
- # If there is a line continuation, drop it, and append the next line.
- if line.endswith('\\'):
- line = line[:-2].strip()
- line += next(lines)
- yield Requirement(line)
-
-
-class Requirement(packaging.requirements.Requirement):
- def __init__(self, requirement_string):
- """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
- try:
- super(Requirement, self).__init__(requirement_string)
- except packaging.requirements.InvalidRequirement as e:
- raise RequirementParseError(str(e))
- self.unsafe_name = self.name
- project_name = safe_name(self.name)
- self.project_name, self.key = project_name, project_name.lower()
- self.specs = [
- (spec.operator, spec.version) for spec in self.specifier]
- self.extras = tuple(map(safe_extra, self.extras))
- self.hashCmp = (
- self.key,
- self.specifier,
- frozenset(self.extras),
- str(self.marker) if self.marker else None,
- )
- self.__hash = hash(self.hashCmp)
-
- def __eq__(self, other):
- return (
- isinstance(other, Requirement) and
- self.hashCmp == other.hashCmp
- )
-
- def __ne__(self, other):
- return not self == other
-
- def __contains__(self, item):
- if isinstance(item, Distribution):
- if item.key != self.key:
- return False
-
- item = item.version
-
- # Allow prereleases always in order to match the previous behavior of
- # this method. In the future this should be smarter and follow PEP 440
- # more accurately.
- return self.specifier.contains(item, prereleases=True)
-
- def __hash__(self):
- return self.__hash
-
- def __repr__(self): return "Requirement.parse(%r)" % str(self)
-
- @staticmethod
- def parse(s):
- req, = parse_requirements(s)
- return req
-
-
-def _get_mro(cls):
- """Get an mro for a type or classic class"""
- if not isinstance(cls, type):
- class cls(cls, object): pass
- return cls.__mro__[1:]
- return cls.__mro__
-
-def _find_adapter(registry, ob):
- """Return an adapter factory for `ob` from `registry`"""
- for t in _get_mro(getattr(ob, '__class__', type(ob))):
- if t in registry:
- return registry[t]
-
-
-def ensure_directory(path):
- """Ensure that the parent directory of `path` exists"""
- dirname = os.path.dirname(path)
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
-
-
-def _bypass_ensure_directory(path):
- """Sandbox-bypassing version of ensure_directory()"""
- if not WRITE_SUPPORT:
- raise IOError('"os.mkdir" not supported on this platform.')
- dirname, filename = split(path)
- if dirname and filename and not isdir(dirname):
- _bypass_ensure_directory(dirname)
- mkdir(dirname, 0o755)
-
-
-def split_sections(s):
- """Split a string or iterable thereof into (section, content) pairs
-
- Each ``section`` is a stripped version of the section header ("[section]")
- and each ``content`` is a list of stripped lines excluding blank lines and
- comment-only lines. If there are any such lines before the first section
- header, they're returned in a first ``section`` of ``None``.
- """
- section = None
- content = []
- for line in yield_lines(s):
- if line.startswith("["):
- if line.endswith("]"):
- if section or content:
- yield section, content
- section = line[1:-1].strip()
- content = []
- else:
- raise ValueError("Invalid section heading", line)
- else:
- content.append(line)
-
- # wrap up last segment
- yield section, content
-
-def _mkstemp(*args,**kw):
- old_open = os.open
- try:
- # temporarily bypass sandboxing
- os.open = os_open
- return tempfile.mkstemp(*args,**kw)
- finally:
- # and then put it back
- os.open = old_open
-
-
-# Silence the PEP440Warning by default, so that end users don't get hit by it
-# randomly just because they use pkg_resources. We want to append the rule
-# because we want earlier uses of filterwarnings to take precedence over this
-# one.
-warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
-
-
-# from jaraco.functools 1.3
-def _call_aside(f, *args, **kwargs):
- f(*args, **kwargs)
- return f
-
-
-@_call_aside
-def _initialize(g=globals()):
- "Set up global resource manager (deliberately not state-saved)"
- manager = ResourceManager()
- g['_manager'] = manager
- for name in dir(manager):
- if not name.startswith('_'):
- g[name] = getattr(manager, name)
-
-
-@_call_aside
-def _initialize_master_working_set():
- """
- Prepare the master working set and make the ``require()``
- API available.
-
- This function has explicit effects on the global state
- of pkg_resources. It is intended to be invoked once at
- the initialization of this module.
-
- Invocation by other packages is unsupported and done
- at their own risk.
- """
- working_set = WorkingSet._build_master()
- _declare_state('object', working_set=working_set)
-
- require = working_set.require
- iter_entry_points = working_set.iter_entry_points
- add_activation_listener = working_set.subscribe
- run_script = working_set.run_script
- # backward compatibility
- run_main = run_script
- # Activate all distributions already on sys.path, and ensure that
- # all distributions added to the working set in the future (e.g. by
- # calling ``require()``) will get activated as well.
- add_activation_listener(lambda dist: dist.activate())
- working_set.entries=[]
- # match order
- list(map(working_set.add_entry, sys.path))
- globals().update(locals())
diff --git a/pkg_resources/_vendor/__init__.py b/pkg_resources/_vendor/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/pkg_resources/_vendor/__init__.py
+++ /dev/null
diff --git a/pkg_resources/_vendor/packaging/__about__.py b/pkg_resources/_vendor/packaging/__about__.py
deleted file mode 100644
index c21a758b..00000000
--- a/pkg_resources/_vendor/packaging/__about__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
-]
-
-__title__ = "packaging"
-__summary__ = "Core utilities for Python packages"
-__uri__ = "https://github.com/pypa/packaging"
-
-__version__ = "16.7"
-
-__author__ = "Donald Stufft and individual contributors"
-__email__ = "donald@stufft.io"
-
-__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py
deleted file mode 100644
index 5ee62202..00000000
--- a/pkg_resources/_vendor/packaging/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-from .__about__ import (
- __author__, __copyright__, __email__, __license__, __summary__, __title__,
- __uri__, __version__
-)
-
-__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
-]
diff --git a/pkg_resources/_vendor/packaging/_compat.py b/pkg_resources/_vendor/packaging/_compat.py
deleted file mode 100644
index 210bb80b..00000000
--- a/pkg_resources/_vendor/packaging/_compat.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-
-# flake8: noqa
-
-if PY3:
- string_types = str,
-else:
- string_types = basestring,
-
-
-def with_metaclass(meta, *bases):
- """
- Create a base class with a metaclass.
- """
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
diff --git a/pkg_resources/_vendor/packaging/_structures.py b/pkg_resources/_vendor/packaging/_structures.py
deleted file mode 100644
index ccc27861..00000000
--- a/pkg_resources/_vendor/packaging/_structures.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-
-class Infinity(object):
-
- def __repr__(self):
- return "Infinity"
-
- def __hash__(self):
- return hash(repr(self))
-
- def __lt__(self, other):
- return False
-
- def __le__(self, other):
- return False
-
- def __eq__(self, other):
- return isinstance(other, self.__class__)
-
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
- return True
-
- def __ge__(self, other):
- return True
-
- def __neg__(self):
- return NegativeInfinity
-
-Infinity = Infinity()
-
-
-class NegativeInfinity(object):
-
- def __repr__(self):
- return "-Infinity"
-
- def __hash__(self):
- return hash(repr(self))
-
- def __lt__(self, other):
- return True
-
- def __le__(self, other):
- return True
-
- def __eq__(self, other):
- return isinstance(other, self.__class__)
-
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
- return False
-
- def __ge__(self, other):
- return False
-
- def __neg__(self):
- return Infinity
-
-NegativeInfinity = NegativeInfinity()
diff --git a/pkg_resources/_vendor/packaging/markers.py b/pkg_resources/_vendor/packaging/markers.py
deleted file mode 100644
index c5d29cd9..00000000
--- a/pkg_resources/_vendor/packaging/markers.py
+++ /dev/null
@@ -1,287 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import operator
-import os
-import platform
-import sys
-
-from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
-from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from pkg_resources.extern.pyparsing import Literal as L # noqa
-
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
-
-
-__all__ = [
- "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
- "Marker", "default_environment",
-]
-
-
-class InvalidMarker(ValueError):
- """
- An invalid marker was found, users should refer to PEP 508.
- """
-
-
-class UndefinedComparison(ValueError):
- """
- An invalid operation was attempted on a value that doesn't support it.
- """
-
-
-class UndefinedEnvironmentName(ValueError):
- """
- A name was attempted to be used that does not exist inside of the
- environment.
- """
-
-
-class Node(object):
-
- def __init__(self, value):
- self.value = value
-
- def __str__(self):
- return str(self.value)
-
- def __repr__(self):
- return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
-
-
-class Variable(Node):
- pass
-
-
-class Value(Node):
- pass
-
-
-VARIABLE = (
- L("implementation_version") |
- L("platform_python_implementation") |
- L("implementation_name") |
- L("python_full_version") |
- L("platform_release") |
- L("platform_version") |
- L("platform_machine") |
- L("platform_system") |
- L("python_version") |
- L("sys_platform") |
- L("os_name") |
- L("os.name") | # PEP-345
- L("sys.platform") | # PEP-345
- L("platform.version") | # PEP-345
- L("platform.machine") | # PEP-345
- L("platform.python_implementation") | # PEP-345
- L("python_implementation") | # undocumented setuptools legacy
- L("extra")
-)
-ALIASES = {
- 'os.name': 'os_name',
- 'sys.platform': 'sys_platform',
- 'platform.version': 'platform_version',
- 'platform.machine': 'platform_machine',
- 'platform.python_implementation': 'platform_python_implementation',
- 'python_implementation': 'platform_python_implementation'
-}
-VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
-
-VERSION_CMP = (
- L("===") |
- L("==") |
- L(">=") |
- L("<=") |
- L("!=") |
- L("~=") |
- L(">") |
- L("<")
-)
-
-MARKER_OP = VERSION_CMP | L("not in") | L("in")
-
-MARKER_VALUE = QuotedString("'") | QuotedString('"')
-MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
-
-BOOLOP = L("and") | L("or")
-
-MARKER_VAR = VARIABLE | MARKER_VALUE
-
-MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
-MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
-
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-
-MARKER_EXPR = Forward()
-MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
-MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
-
-MARKER = stringStart + MARKER_EXPR + stringEnd
-
-
-def _coerce_parse_result(results):
- if isinstance(results, ParseResults):
- return [_coerce_parse_result(i) for i in results]
- else:
- return results
-
-
-def _format_marker(marker, first=True):
- assert isinstance(marker, (list, tuple, string_types))
-
- # Sometimes we have a structure like [[...]] which is a single item list
- # where the single item is itself it's own list. In that case we want skip
- # the rest of this function so that we don't get extraneous () on the
- # outside.
- if (isinstance(marker, list) and len(marker) == 1 and
- isinstance(marker[0], (list, tuple))):
- return _format_marker(marker[0])
-
- if isinstance(marker, list):
- inner = (_format_marker(m, first=False) for m in marker)
- if first:
- return " ".join(inner)
- else:
- return "(" + " ".join(inner) + ")"
- elif isinstance(marker, tuple):
- return '{0} {1} "{2}"'.format(*marker)
- else:
- return marker
-
-
-_operators = {
- "in": lambda lhs, rhs: lhs in rhs,
- "not in": lambda lhs, rhs: lhs not in rhs,
- "<": operator.lt,
- "<=": operator.le,
- "==": operator.eq,
- "!=": operator.ne,
- ">=": operator.ge,
- ">": operator.gt,
-}
-
-
-def _eval_op(lhs, op, rhs):
- try:
- spec = Specifier("".join([op, rhs]))
- except InvalidSpecifier:
- pass
- else:
- return spec.contains(lhs)
-
- oper = _operators.get(op)
- if oper is None:
- raise UndefinedComparison(
- "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
- )
-
- return oper(lhs, rhs)
-
-
-_undefined = object()
-
-
-def _get_env(environment, name):
- value = environment.get(name, _undefined)
-
- if value is _undefined:
- raise UndefinedEnvironmentName(
- "{0!r} does not exist in evaluation environment.".format(name)
- )
-
- return value
-
-
-def _evaluate_markers(markers, environment):
- groups = [[]]
-
- for marker in markers:
- assert isinstance(marker, (list, tuple, string_types))
-
- if isinstance(marker, list):
- groups[-1].append(_evaluate_markers(marker, environment))
- elif isinstance(marker, tuple):
- lhs, op, rhs = marker
-
- if isinstance(lhs, Variable):
- lhs_value = _get_env(environment, lhs.value)
- rhs_value = rhs.value
- else:
- lhs_value = lhs.value
- rhs_value = _get_env(environment, rhs.value)
-
- groups[-1].append(_eval_op(lhs_value, op, rhs_value))
- else:
- assert marker in ["and", "or"]
- if marker == "or":
- groups.append([])
-
- return any(all(item) for item in groups)
-
-
-def format_full_version(info):
- version = '{0.major}.{0.minor}.{0.micro}'.format(info)
- kind = info.releaselevel
- if kind != 'final':
- version += kind[0] + str(info.serial)
- return version
-
-
-def default_environment():
- if hasattr(sys, 'implementation'):
- iver = format_full_version(sys.implementation.version)
- implementation_name = sys.implementation.name
- else:
- iver = '0'
- implementation_name = ''
-
- return {
- "implementation_name": implementation_name,
- "implementation_version": iver,
- "os_name": os.name,
- "platform_machine": platform.machine(),
- "platform_release": platform.release(),
- "platform_system": platform.system(),
- "platform_version": platform.version(),
- "python_full_version": platform.python_version(),
- "platform_python_implementation": platform.python_implementation(),
- "python_version": platform.python_version()[:3],
- "sys_platform": sys.platform,
- }
-
-
-class Marker(object):
-
- def __init__(self, marker):
- try:
- self._markers = _coerce_parse_result(MARKER.parseString(marker))
- except ParseException as e:
- err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
- marker, marker[e.loc:e.loc + 8])
- raise InvalidMarker(err_str)
-
- def __str__(self):
- return _format_marker(self._markers)
-
- def __repr__(self):
- return "<Marker({0!r})>".format(str(self))
-
- def evaluate(self, environment=None):
- """Evaluate a marker.
-
- Return the boolean from evaluating the given marker against the
- environment. environment is an optional argument to override all or
- part of the determined environment.
-
- The environment is determined from the current Python process.
- """
- current_environment = default_environment()
- if environment is not None:
- current_environment.update(environment)
-
- return _evaluate_markers(self._markers, current_environment)
diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py
deleted file mode 100644
index 0c8c4a38..00000000
--- a/pkg_resources/_vendor/packaging/requirements.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import string
-import re
-
-from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
-from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from pkg_resources.extern.pyparsing import Literal as L # noqa
-from pkg_resources.extern.six.moves.urllib import parse as urlparse
-
-from .markers import MARKER_EXPR, Marker
-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
-
-
-class InvalidRequirement(ValueError):
- """
- An invalid requirement was found, users should refer to PEP 508.
- """
-
-
-ALPHANUM = Word(string.ascii_letters + string.digits)
-
-LBRACKET = L("[").suppress()
-RBRACKET = L("]").suppress()
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-COMMA = L(",").suppress()
-SEMICOLON = L(";").suppress()
-AT = L("@").suppress()
-
-PUNCTUATION = Word("-_.")
-IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
-IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
-
-NAME = IDENTIFIER("name")
-EXTRA = IDENTIFIER
-
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
-
-EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
-EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
-
-VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
-VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
-
-VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
- joinString=",", adjacent=False)("_raw_spec")
-_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
-
-VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
-VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
-
-MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
-MARKER_EXPR.setParseAction(
- lambda s, l, t: Marker(s[t._original_start:t._original_end])
-)
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
-
-VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
-URL_AND_MARKER = URL + Optional(MARKER)
-
-NAMED_REQUIREMENT = \
- NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
-
-REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
-
-
-class Requirement(object):
- """Parse a requirement.
-
- Parse a given requirement string into its parts, such as name, specifier,
- URL, and extras. Raises InvalidRequirement on a badly-formed requirement
- string.
- """
-
- # TODO: Can we test whether something is contained within a requirement?
- # If so how do we do that? Do we need to test against the _name_ of
- # the thing as well as the version? What about the markers?
- # TODO: Can we normalize the name and extra name?
-
- def __init__(self, requirement_string):
- try:
- req = REQUIREMENT.parseString(requirement_string)
- except ParseException as e:
- raise InvalidRequirement(
- "Invalid requirement, parse error at \"{0!r}\"".format(
- requirement_string[e.loc:e.loc + 8]))
-
- self.name = req.name
- if req.url:
- parsed_url = urlparse.urlparse(req.url)
- if not (parsed_url.scheme and parsed_url.netloc) or (
- not parsed_url.scheme and not parsed_url.netloc):
- raise InvalidRequirement("Invalid URL given")
- self.url = req.url
- else:
- self.url = None
- self.extras = set(req.extras.asList() if req.extras else [])
- self.specifier = SpecifierSet(req.specifier)
- self.marker = req.marker if req.marker else None
-
- def __str__(self):
- parts = [self.name]
-
- if self.extras:
- parts.append("[{0}]".format(",".join(sorted(self.extras))))
-
- if self.specifier:
- parts.append(str(self.specifier))
-
- if self.url:
- parts.append("@ {0}".format(self.url))
-
- if self.marker:
- parts.append("; {0}".format(self.marker))
-
- return "".join(parts)
-
- def __repr__(self):
- return "<Requirement({0!r})>".format(str(self))
diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py
deleted file mode 100644
index 7f5a76cf..00000000
--- a/pkg_resources/_vendor/packaging/specifiers.py
+++ /dev/null
@@ -1,774 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import abc
-import functools
-import itertools
-import re
-
-from ._compat import string_types, with_metaclass
-from .version import Version, LegacyVersion, parse
-
-
-class InvalidSpecifier(ValueError):
- """
- An invalid specifier was found, users should refer to PEP 440.
- """
-
-
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
-
- @abc.abstractmethod
- def __str__(self):
- """
- Returns the str representation of this Specifier like object. This
- should be representative of the Specifier itself.
- """
-
- @abc.abstractmethod
- def __hash__(self):
- """
- Returns a hash value for this Specifier like object.
- """
-
- @abc.abstractmethod
- def __eq__(self, other):
- """
- Returns a boolean representing whether or not the two Specifier like
- objects are equal.
- """
-
- @abc.abstractmethod
- def __ne__(self, other):
- """
- Returns a boolean representing whether or not the two Specifier like
- objects are not equal.
- """
-
- @abc.abstractproperty
- def prereleases(self):
- """
- Returns whether or not pre-releases as a whole are allowed by this
- specifier.
- """
-
- @prereleases.setter
- def prereleases(self, value):
- """
- Sets whether or not pre-releases as a whole are allowed by this
- specifier.
- """
-
- @abc.abstractmethod
- def contains(self, item, prereleases=None):
- """
- Determines if the given item is contained within this specifier.
- """
-
- @abc.abstractmethod
- def filter(self, iterable, prereleases=None):
- """
- Takes an iterable of items and filters them so that only items which
- are contained within this specifier are allowed in it.
- """
-
-
-class _IndividualSpecifier(BaseSpecifier):
-
- _operators = {}
-
- def __init__(self, spec="", prereleases=None):
- match = self._regex.search(spec)
- if not match:
- raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
-
- self._spec = (
- match.group("operator").strip(),
- match.group("version").strip(),
- )
-
- # Store whether or not this Specifier should accept prereleases
- self._prereleases = prereleases
-
- def __repr__(self):
- pre = (
- ", prereleases={0!r}".format(self.prereleases)
- if self._prereleases is not None
- else ""
- )
-
- return "<{0}({1!r}{2})>".format(
- self.__class__.__name__,
- str(self),
- pre,
- )
-
- def __str__(self):
- return "{0}{1}".format(*self._spec)
-
- def __hash__(self):
- return hash(self._spec)
-
- def __eq__(self, other):
- if isinstance(other, string_types):
- try:
- other = self.__class__(other)
- except InvalidSpecifier:
- return NotImplemented
- elif not isinstance(other, self.__class__):
- return NotImplemented
-
- return self._spec == other._spec
-
- def __ne__(self, other):
- if isinstance(other, string_types):
- try:
- other = self.__class__(other)
- except InvalidSpecifier:
- return NotImplemented
- elif not isinstance(other, self.__class__):
- return NotImplemented
-
- return self._spec != other._spec
-
- def _get_operator(self, op):
- return getattr(self, "_compare_{0}".format(self._operators[op]))
-
- def _coerce_version(self, version):
- if not isinstance(version, (LegacyVersion, Version)):
- version = parse(version)
- return version
-
- @property
- def operator(self):
- return self._spec[0]
-
- @property
- def version(self):
- return self._spec[1]
-
- @property
- def prereleases(self):
- return self._prereleases
-
- @prereleases.setter
- def prereleases(self, value):
- self._prereleases = value
-
- def __contains__(self, item):
- return self.contains(item)
-
- def contains(self, item, prereleases=None):
- # Determine if prereleases are to be allowed or not.
- if prereleases is None:
- prereleases = self.prereleases
-
- # Normalize item to a Version or LegacyVersion, this allows us to have
- # a shortcut for ``"2.0" in Specifier(">=2")
- item = self._coerce_version(item)
-
- # Determine if we should be supporting prereleases in this specifier
- # or not, if we do not support prereleases than we can short circuit
- # logic if this version is a prereleases.
- if item.is_prerelease and not prereleases:
- return False
-
- # Actually do the comparison to determine if this item is contained
- # within this Specifier or not.
- return self._get_operator(self.operator)(item, self.version)
-
- def filter(self, iterable, prereleases=None):
- yielded = False
- found_prereleases = []
-
- kw = {"prereleases": prereleases if prereleases is not None else True}
-
- # Attempt to iterate over all the values in the iterable and if any of
- # them match, yield them.
- for version in iterable:
- parsed_version = self._coerce_version(version)
-
- if self.contains(parsed_version, **kw):
- # If our version is a prerelease, and we were not set to allow
- # prereleases, then we'll store it for later incase nothing
- # else matches this specifier.
- if (parsed_version.is_prerelease and not
- (prereleases or self.prereleases)):
- found_prereleases.append(version)
- # Either this is not a prerelease, or we should have been
- # accepting prereleases from the begining.
- else:
- yielded = True
- yield version
-
- # Now that we've iterated over everything, determine if we've yielded
- # any values, and if we have not and we have any prereleases stored up
- # then we will go ahead and yield the prereleases.
- if not yielded and found_prereleases:
- for version in found_prereleases:
- yield version
-
-
-class LegacySpecifier(_IndividualSpecifier):
-
- _regex_str = (
- r"""
- (?P<operator>(==|!=|<=|>=|<|>))
- \s*
- (?P<version>
- [^,;\s)]* # Since this is a "legacy" specifier, and the version
- # string can be just about anything, we match everything
- # except for whitespace, a semi-colon for marker support,
- # a closing paren since versions can be enclosed in
- # them, and a comma since it's a version separator.
- )
- """
- )
-
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
- _operators = {
- "==": "equal",
- "!=": "not_equal",
- "<=": "less_than_equal",
- ">=": "greater_than_equal",
- "<": "less_than",
- ">": "greater_than",
- }
-
- def _coerce_version(self, version):
- if not isinstance(version, LegacyVersion):
- version = LegacyVersion(str(version))
- return version
-
- def _compare_equal(self, prospective, spec):
- return prospective == self._coerce_version(spec)
-
- def _compare_not_equal(self, prospective, spec):
- return prospective != self._coerce_version(spec)
-
- def _compare_less_than_equal(self, prospective, spec):
- return prospective <= self._coerce_version(spec)
-
- def _compare_greater_than_equal(self, prospective, spec):
- return prospective >= self._coerce_version(spec)
-
- def _compare_less_than(self, prospective, spec):
- return prospective < self._coerce_version(spec)
-
- def _compare_greater_than(self, prospective, spec):
- return prospective > self._coerce_version(spec)
-
-
-def _require_version_compare(fn):
- @functools.wraps(fn)
- def wrapped(self, prospective, spec):
- if not isinstance(prospective, Version):
- return False
- return fn(self, prospective, spec)
- return wrapped
-
-
-class Specifier(_IndividualSpecifier):
-
- _regex_str = (
- r"""
- (?P<operator>(~=|==|!=|<=|>=|<|>|===))
- (?P<version>
- (?:
- # The identity operators allow for an escape hatch that will
- # do an exact string match of the version you wish to install.
- # This will not be parsed by PEP 440 and we cannot determine
- # any semantic meaning from it. This operator is discouraged
- # but included entirely as an escape hatch.
- (?<====) # Only match for the identity operator
- \s*
- [^\s]* # We just match everything, except for whitespace
- # since we are only testing for strict identity.
- )
- |
- (?:
- # The (non)equality operators allow for wild card and local
- # versions to be specified so we have to define these two
- # operators separately to enable that.
- (?<===|!=) # Only match for equals and not equals
-
- \s*
- v?
- (?:[0-9]+!)? # epoch
- [0-9]+(?:\.[0-9]+)* # release
- (?: # pre release
- [-_\.]?
- (a|b|c|rc|alpha|beta|pre|preview)
- [-_\.]?
- [0-9]*
- )?
- (?: # post release
- (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
- )?
-
- # You cannot use a wild card and a dev or local version
- # together so group them with a | and make them optional.
- (?:
- (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
- (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
- |
- \.\* # Wild card syntax of .*
- )?
- )
- |
- (?:
- # The compatible operator requires at least two digits in the
- # release segment.
- (?<=~=) # Only match for the compatible operator
-
- \s*
- v?
- (?:[0-9]+!)? # epoch
- [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
- (?: # pre release
- [-_\.]?
- (a|b|c|rc|alpha|beta|pre|preview)
- [-_\.]?
- [0-9]*
- )?
- (?: # post release
- (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
- )?
- (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
- )
- |
- (?:
- # All other operators only allow a sub set of what the
- # (non)equality operators do. Specifically they do not allow
- # local versions to be specified nor do they allow the prefix
- # matching wild cards.
- (?<!==|!=|~=) # We have special cases for these
- # operators so we want to make sure they
- # don't match here.
-
- \s*
- v?
- (?:[0-9]+!)? # epoch
- [0-9]+(?:\.[0-9]+)* # release
- (?: # pre release
- [-_\.]?
- (a|b|c|rc|alpha|beta|pre|preview)
- [-_\.]?
- [0-9]*
- )?
- (?: # post release
- (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
- )?
- (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
- )
- )
- """
- )
-
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
- _operators = {
- "~=": "compatible",
- "==": "equal",
- "!=": "not_equal",
- "<=": "less_than_equal",
- ">=": "greater_than_equal",
- "<": "less_than",
- ">": "greater_than",
- "===": "arbitrary",
- }
-
- @_require_version_compare
- def _compare_compatible(self, prospective, spec):
- # Compatible releases have an equivalent combination of >= and ==. That
- # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
- # implement this in terms of the other specifiers instead of
- # implementing it ourselves. The only thing we need to do is construct
- # the other specifiers.
-
- # We want everything but the last item in the version, but we want to
- # ignore post and dev releases and we want to treat the pre-release as
- # it's own separate segment.
- prefix = ".".join(
- list(
- itertools.takewhile(
- lambda x: (not x.startswith("post") and not
- x.startswith("dev")),
- _version_split(spec),
- )
- )[:-1]
- )
-
- # Add the prefix notation to the end of our string
- prefix += ".*"
-
- return (self._get_operator(">=")(prospective, spec) and
- self._get_operator("==")(prospective, prefix))
-
- @_require_version_compare
- def _compare_equal(self, prospective, spec):
- # We need special logic to handle prefix matching
- if spec.endswith(".*"):
- # In the case of prefix matching we want to ignore local segment.
- prospective = Version(prospective.public)
- # Split the spec out by dots, and pretend that there is an implicit
- # dot in between a release segment and a pre-release segment.
- spec = _version_split(spec[:-2]) # Remove the trailing .*
-
- # Split the prospective version out by dots, and pretend that there
- # is an implicit dot in between a release segment and a pre-release
- # segment.
- prospective = _version_split(str(prospective))
-
- # Shorten the prospective version to be the same length as the spec
- # so that we can determine if the specifier is a prefix of the
- # prospective version or not.
- prospective = prospective[:len(spec)]
-
- # Pad out our two sides with zeros so that they both equal the same
- # length.
- spec, prospective = _pad_version(spec, prospective)
- else:
- # Convert our spec string into a Version
- spec = Version(spec)
-
- # If the specifier does not have a local segment, then we want to
- # act as if the prospective version also does not have a local
- # segment.
- if not spec.local:
- prospective = Version(prospective.public)
-
- return prospective == spec
-
- @_require_version_compare
- def _compare_not_equal(self, prospective, spec):
- return not self._compare_equal(prospective, spec)
-
- @_require_version_compare
- def _compare_less_than_equal(self, prospective, spec):
- return prospective <= Version(spec)
-
- @_require_version_compare
- def _compare_greater_than_equal(self, prospective, spec):
- return prospective >= Version(spec)
-
- @_require_version_compare
- def _compare_less_than(self, prospective, spec):
- # Convert our spec to a Version instance, since we'll want to work with
- # it as a version.
- spec = Version(spec)
-
- # Check to see if the prospective version is less than the spec
- # version. If it's not we can short circuit and just return False now
- # instead of doing extra unneeded work.
- if not prospective < spec:
- return False
-
- # This special case is here so that, unless the specifier itself
- # includes is a pre-release version, that we do not accept pre-release
- # versions for the version mentioned in the specifier (e.g. <3.1 should
- # not match 3.1.dev0, but should match 3.0.dev0).
- if not spec.is_prerelease and prospective.is_prerelease:
- if Version(prospective.base_version) == Version(spec.base_version):
- return False
-
- # If we've gotten to here, it means that prospective version is both
- # less than the spec version *and* it's not a pre-release of the same
- # version in the spec.
- return True
-
- @_require_version_compare
- def _compare_greater_than(self, prospective, spec):
- # Convert our spec to a Version instance, since we'll want to work with
- # it as a version.
- spec = Version(spec)
-
- # Check to see if the prospective version is greater than the spec
- # version. If it's not we can short circuit and just return False now
- # instead of doing extra unneeded work.
- if not prospective > spec:
- return False
-
- # This special case is here so that, unless the specifier itself
- # includes is a post-release version, that we do not accept
- # post-release versions for the version mentioned in the specifier
- # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
- if not spec.is_postrelease and prospective.is_postrelease:
- if Version(prospective.base_version) == Version(spec.base_version):
- return False
-
- # Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is techincally greater than, to match.
- if prospective.local is not None:
- if Version(prospective.base_version) == Version(spec.base_version):
- return False
-
- # If we've gotten to here, it means that prospective version is both
- # greater than the spec version *and* it's not a pre-release of the
- # same version in the spec.
- return True
-
- def _compare_arbitrary(self, prospective, spec):
- return str(prospective).lower() == str(spec).lower()
-
- @property
- def prereleases(self):
- # If there is an explicit prereleases set for this, then we'll just
- # blindly use that.
- if self._prereleases is not None:
- return self._prereleases
-
- # Look at all of our specifiers and determine if they are inclusive
- # operators, and if they are if they are including an explicit
- # prerelease.
- operator, version = self._spec
- if operator in ["==", ">=", "<=", "~=", "==="]:
- # The == specifier can include a trailing .*, if it does we
- # want to remove before parsing.
- if operator == "==" and version.endswith(".*"):
- version = version[:-2]
-
- # Parse the version, and if it is a pre-release than this
- # specifier allows pre-releases.
- if parse(version).is_prerelease:
- return True
-
- return False
-
- @prereleases.setter
- def prereleases(self, value):
- self._prereleases = value
-
-
-_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
-
-
-def _version_split(version):
- result = []
- for item in version.split("."):
- match = _prefix_regex.search(item)
- if match:
- result.extend(match.groups())
- else:
- result.append(item)
- return result
-
-
-def _pad_version(left, right):
- left_split, right_split = [], []
-
- # Get the release segment of our versions
- left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
- right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
-
- # Get the rest of our versions
- left_split.append(left[len(left_split[0]):])
- right_split.append(right[len(right_split[0]):])
-
- # Insert our padding
- left_split.insert(
- 1,
- ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
- )
- right_split.insert(
- 1,
- ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
- )
-
- return (
- list(itertools.chain(*left_split)),
- list(itertools.chain(*right_split)),
- )
-
-
-class SpecifierSet(BaseSpecifier):
-
- def __init__(self, specifiers="", prereleases=None):
- # Split on , to break each indidivual specifier into it's own item, and
- # strip each item to remove leading/trailing whitespace.
- specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
-
- # Parsed each individual specifier, attempting first to make it a
- # Specifier and falling back to a LegacySpecifier.
- parsed = set()
- for specifier in specifiers:
- try:
- parsed.add(Specifier(specifier))
- except InvalidSpecifier:
- parsed.add(LegacySpecifier(specifier))
-
- # Turn our parsed specifiers into a frozen set and save them for later.
- self._specs = frozenset(parsed)
-
- # Store our prereleases value so we can use it later to determine if
- # we accept prereleases or not.
- self._prereleases = prereleases
-
- def __repr__(self):
- pre = (
- ", prereleases={0!r}".format(self.prereleases)
- if self._prereleases is not None
- else ""
- )
-
- return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
-
- def __str__(self):
- return ",".join(sorted(str(s) for s in self._specs))
-
- def __hash__(self):
- return hash(self._specs)
-
- def __and__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif not isinstance(other, SpecifierSet):
- return NotImplemented
-
- specifier = SpecifierSet()
- specifier._specs = frozenset(self._specs | other._specs)
-
- if self._prereleases is None and other._prereleases is not None:
- specifier._prereleases = other._prereleases
- elif self._prereleases is not None and other._prereleases is None:
- specifier._prereleases = self._prereleases
- elif self._prereleases == other._prereleases:
- specifier._prereleases = self._prereleases
- else:
- raise ValueError(
- "Cannot combine SpecifierSets with True and False prerelease "
- "overrides."
- )
-
- return specifier
-
- def __eq__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
- other = SpecifierSet(str(other))
- elif not isinstance(other, SpecifierSet):
- return NotImplemented
-
- return self._specs == other._specs
-
- def __ne__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
- other = SpecifierSet(str(other))
- elif not isinstance(other, SpecifierSet):
- return NotImplemented
-
- return self._specs != other._specs
-
- def __len__(self):
- return len(self._specs)
-
- def __iter__(self):
- return iter(self._specs)
-
- @property
- def prereleases(self):
- # If we have been given an explicit prerelease modifier, then we'll
- # pass that through here.
- if self._prereleases is not None:
- return self._prereleases
-
- # If we don't have any specifiers, and we don't have a forced value,
- # then we'll just return None since we don't know if this should have
- # pre-releases or not.
- if not self._specs:
- return None
-
- # Otherwise we'll see if any of the given specifiers accept
- # prereleases, if any of them do we'll return True, otherwise False.
- return any(s.prereleases for s in self._specs)
-
- @prereleases.setter
- def prereleases(self, value):
- self._prereleases = value
-
- def __contains__(self, item):
- return self.contains(item)
-
- def contains(self, item, prereleases=None):
- # Ensure that our item is a Version or LegacyVersion instance.
- if not isinstance(item, (LegacyVersion, Version)):
- item = parse(item)
-
- # Determine if we're forcing a prerelease or not, if we're not forcing
- # one for this particular filter call, then we'll use whatever the
- # SpecifierSet thinks for whether or not we should support prereleases.
- if prereleases is None:
- prereleases = self.prereleases
-
- # We can determine if we're going to allow pre-releases by looking to
- # see if any of the underlying items supports them. If none of them do
- # and this item is a pre-release then we do not allow it and we can
- # short circuit that here.
- # Note: This means that 1.0.dev1 would not be contained in something
- # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
- if not prereleases and item.is_prerelease:
- return False
-
- # We simply dispatch to the underlying specs here to make sure that the
- # given version is contained within all of them.
- # Note: This use of all() here means that an empty set of specifiers
- # will always return True, this is an explicit design decision.
- return all(
- s.contains(item, prereleases=prereleases)
- for s in self._specs
- )
-
- def filter(self, iterable, prereleases=None):
- # Determine if we're forcing a prerelease or not, if we're not forcing
- # one for this particular filter call, then we'll use whatever the
- # SpecifierSet thinks for whether or not we should support prereleases.
- if prereleases is None:
- prereleases = self.prereleases
-
- # If we have any specifiers, then we want to wrap our iterable in the
- # filter method for each one, this will act as a logical AND amongst
- # each specifier.
- if self._specs:
- for spec in self._specs:
- iterable = spec.filter(iterable, prereleases=bool(prereleases))
- return iterable
- # If we do not have any specifiers, then we need to have a rough filter
- # which will filter out any pre-releases, unless there are no final
- # releases, and which will filter out LegacyVersion in general.
- else:
- filtered = []
- found_prereleases = []
-
- for item in iterable:
- # Ensure that we some kind of Version class for this item.
- if not isinstance(item, (LegacyVersion, Version)):
- parsed_version = parse(item)
- else:
- parsed_version = item
-
- # Filter out any item which is parsed as a LegacyVersion
- if isinstance(parsed_version, LegacyVersion):
- continue
-
- # Store any item which is a pre-release for later unless we've
- # already found a final version or we are accepting prereleases
- if parsed_version.is_prerelease and not prereleases:
- if not filtered:
- found_prereleases.append(item)
- else:
- filtered.append(item)
-
- # If we've found no items except for pre-releases, then we'll go
- # ahead and use the pre-releases
- if not filtered and found_prereleases and prereleases is None:
- return found_prereleases
-
- return filtered
diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py
deleted file mode 100644
index 942387ce..00000000
--- a/pkg_resources/_vendor/packaging/utils.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import re
-
-
-_canonicalize_regex = re.compile(r"[-_.]+")
-
-
-def canonicalize_name(name):
- # This is taken from PEP 503.
- return _canonicalize_regex.sub("-", name).lower()
diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py
deleted file mode 100644
index 83b5ee8c..00000000
--- a/pkg_resources/_vendor/packaging/version.py
+++ /dev/null
@@ -1,393 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import collections
-import itertools
-import re
-
-from ._structures import Infinity
-
-
-__all__ = [
- "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
-]
-
-
-_Version = collections.namedtuple(
- "_Version",
- ["epoch", "release", "dev", "pre", "post", "local"],
-)
-
-
-def parse(version):
- """
- Parse the given version string and return either a :class:`Version` object
- or a :class:`LegacyVersion` object depending on if the given version is
- a valid PEP 440 version or a legacy version.
- """
- try:
- return Version(version)
- except InvalidVersion:
- return LegacyVersion(version)
-
-
-class InvalidVersion(ValueError):
- """
- An invalid version was found, users should refer to PEP 440.
- """
-
-
-class _BaseVersion(object):
-
- def __hash__(self):
- return hash(self._key)
-
- def __lt__(self, other):
- return self._compare(other, lambda s, o: s < o)
-
- def __le__(self, other):
- return self._compare(other, lambda s, o: s <= o)
-
- def __eq__(self, other):
- return self._compare(other, lambda s, o: s == o)
-
- def __ge__(self, other):
- return self._compare(other, lambda s, o: s >= o)
-
- def __gt__(self, other):
- return self._compare(other, lambda s, o: s > o)
-
- def __ne__(self, other):
- return self._compare(other, lambda s, o: s != o)
-
- def _compare(self, other, method):
- if not isinstance(other, _BaseVersion):
- return NotImplemented
-
- return method(self._key, other._key)
-
-
-class LegacyVersion(_BaseVersion):
-
- def __init__(self, version):
- self._version = str(version)
- self._key = _legacy_cmpkey(self._version)
-
- def __str__(self):
- return self._version
-
- def __repr__(self):
- return "<LegacyVersion({0})>".format(repr(str(self)))
-
- @property
- def public(self):
- return self._version
-
- @property
- def base_version(self):
- return self._version
-
- @property
- def local(self):
- return None
-
- @property
- def is_prerelease(self):
- return False
-
- @property
- def is_postrelease(self):
- return False
-
-
-_legacy_version_component_re = re.compile(
- r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
-)
-
-_legacy_version_replacement_map = {
- "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
-}
-
-
-def _parse_version_parts(s):
- for part in _legacy_version_component_re.split(s):
- part = _legacy_version_replacement_map.get(part, part)
-
- if not part or part == ".":
- continue
-
- if part[:1] in "0123456789":
- # pad for numeric comparison
- yield part.zfill(8)
- else:
- yield "*" + part
-
- # ensure that alpha/beta/candidate are before final
- yield "*final"
-
-
-def _legacy_cmpkey(version):
- # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
- # greater than or equal to 0. This will effectively put the LegacyVersion,
- # which uses the defacto standard originally implemented by setuptools,
- # as before all PEP 440 versions.
- epoch = -1
-
- # This scheme is taken from pkg_resources.parse_version setuptools prior to
- # it's adoption of the packaging library.
- parts = []
- for part in _parse_version_parts(version.lower()):
- if part.startswith("*"):
- # remove "-" before a prerelease tag
- if part < "*final":
- while parts and parts[-1] == "*final-":
- parts.pop()
-
- # remove trailing zeros from each series of numeric parts
- while parts and parts[-1] == "00000000":
- parts.pop()
-
- parts.append(part)
- parts = tuple(parts)
-
- return epoch, parts
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-VERSION_PATTERN = r"""
- v?
- (?:
- (?:(?P<epoch>[0-9]+)!)? # epoch
- (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
- (?P<pre> # pre-release
- [-_\.]?
- (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
- [-_\.]?
- (?P<pre_n>[0-9]+)?
- )?
- (?P<post> # post release
- (?:-(?P<post_n1>[0-9]+))
- |
- (?:
- [-_\.]?
- (?P<post_l>post|rev|r)
- [-_\.]?
- (?P<post_n2>[0-9]+)?
- )
- )?
- (?P<dev> # dev release
- [-_\.]?
- (?P<dev_l>dev)
- [-_\.]?
- (?P<dev_n>[0-9]+)?
- )?
- )
- (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
-"""
-
-
-class Version(_BaseVersion):
-
- _regex = re.compile(
- r"^\s*" + VERSION_PATTERN + r"\s*$",
- re.VERBOSE | re.IGNORECASE,
- )
-
- def __init__(self, version):
- # Validate the version and parse it into pieces
- match = self._regex.search(version)
- if not match:
- raise InvalidVersion("Invalid version: '{0}'".format(version))
-
- # Store the parsed out pieces of the version
- self._version = _Version(
- epoch=int(match.group("epoch")) if match.group("epoch") else 0,
- release=tuple(int(i) for i in match.group("release").split(".")),
- pre=_parse_letter_version(
- match.group("pre_l"),
- match.group("pre_n"),
- ),
- post=_parse_letter_version(
- match.group("post_l"),
- match.group("post_n1") or match.group("post_n2"),
- ),
- dev=_parse_letter_version(
- match.group("dev_l"),
- match.group("dev_n"),
- ),
- local=_parse_local_version(match.group("local")),
- )
-
- # Generate a key which will be used for sorting
- self._key = _cmpkey(
- self._version.epoch,
- self._version.release,
- self._version.pre,
- self._version.post,
- self._version.dev,
- self._version.local,
- )
-
- def __repr__(self):
- return "<Version({0})>".format(repr(str(self)))
-
- def __str__(self):
- parts = []
-
- # Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
-
- # Release segment
- parts.append(".".join(str(x) for x in self._version.release))
-
- # Pre-release
- if self._version.pre is not None:
- parts.append("".join(str(x) for x in self._version.pre))
-
- # Post-release
- if self._version.post is not None:
- parts.append(".post{0}".format(self._version.post[1]))
-
- # Development release
- if self._version.dev is not None:
- parts.append(".dev{0}".format(self._version.dev[1]))
-
- # Local version segment
- if self._version.local is not None:
- parts.append(
- "+{0}".format(".".join(str(x) for x in self._version.local))
- )
-
- return "".join(parts)
-
- @property
- def public(self):
- return str(self).split("+", 1)[0]
-
- @property
- def base_version(self):
- parts = []
-
- # Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
-
- # Release segment
- parts.append(".".join(str(x) for x in self._version.release))
-
- return "".join(parts)
-
- @property
- def local(self):
- version_string = str(self)
- if "+" in version_string:
- return version_string.split("+", 1)[1]
-
- @property
- def is_prerelease(self):
- return bool(self._version.dev or self._version.pre)
-
- @property
- def is_postrelease(self):
- return bool(self._version.post)
-
-
-def _parse_letter_version(letter, number):
- if letter:
- # We consider there to be an implicit 0 in a pre-release if there is
- # not a numeral associated with it.
- if number is None:
- number = 0
-
- # We normalize any letters to their lower case form
- letter = letter.lower()
-
- # We consider some words to be alternate spellings of other words and
- # in those cases we want to normalize the spellings to our preferred
- # spelling.
- if letter == "alpha":
- letter = "a"
- elif letter == "beta":
- letter = "b"
- elif letter in ["c", "pre", "preview"]:
- letter = "rc"
- elif letter in ["rev", "r"]:
- letter = "post"
-
- return letter, int(number)
- if not letter and number:
- # We assume if we are given a number, but we are not given a letter
- # then this is using the implicit post release syntax (e.g. 1.0-1)
- letter = "post"
-
- return letter, int(number)
-
-
-_local_version_seperators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local):
- """
- Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
- """
- if local is not None:
- return tuple(
- part.lower() if not part.isdigit() else int(part)
- for part in _local_version_seperators.split(local)
- )
-
-
-def _cmpkey(epoch, release, pre, post, dev, local):
- # When we compare a release version, we want to compare it with all of the
- # trailing zeros removed. So we'll use a reverse the list, drop all the now
- # leading zeros until we come to something non zero, then take the rest
- # re-reverse it back into the correct order and make it a tuple and use
- # that for our sorting key.
- release = tuple(
- reversed(list(
- itertools.dropwhile(
- lambda x: x == 0,
- reversed(release),
- )
- ))
- )
-
- # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
- # We'll do this by abusing the pre segment, but we _only_ want to do this
- # if there is not a pre or a post segment. If we have one of those then
- # the normal sorting rules will handle this case correctly.
- if pre is None and post is None and dev is not None:
- pre = -Infinity
- # Versions without a pre-release (except as noted above) should sort after
- # those with one.
- elif pre is None:
- pre = Infinity
-
- # Versions without a post segment should sort before those with one.
- if post is None:
- post = -Infinity
-
- # Versions without a development segment should sort after those with one.
- if dev is None:
- dev = Infinity
-
- if local is None:
- # Versions without a local segment should sort before those with one.
- local = -Infinity
- else:
- # Versions with a local segment need that segment parsed to implement
- # the sorting rules in PEP440.
- # - Alpha numeric segments sort before numeric segments
- # - Alpha numeric segments sort lexicographically
- # - Numeric segments sort numerically
- # - Shorter versions sort before longer versions when the prefixes
- # match exactly
- local = tuple(
- (i, "") if isinstance(i, int) else (-Infinity, i)
- for i in local
- )
-
- return epoch, release, pre, post, dev, local
diff --git a/pkg_resources/_vendor/pyparsing.py b/pkg_resources/_vendor/pyparsing.py
deleted file mode 100644
index 3e02dbee..00000000
--- a/pkg_resources/_vendor/pyparsing.py
+++ /dev/null
@@ -1,3805 +0,0 @@
-# module pyparsing.py
-#
-# Copyright (c) 2003-2015 Paul T. McGuire
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-__doc__ = \
-"""
-pyparsing module - Classes and methods to define and execute parsing grammars
-
-The pyparsing module is an alternative approach to creating and executing simple grammars,
-vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
-don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
-provides a library of classes that you use to construct the grammar directly in Python.
-
-Here is a program to parse "Hello, World!" (or any greeting of the form C{"<salutation>, <addressee>!"})::
-
- from pyparsing import Word, alphas
-
- # define grammar of a greeting
- greet = Word( alphas ) + "," + Word( alphas ) + "!"
-
- hello = "Hello, World!"
- print (hello, "->", greet.parseString( hello ))
-
-The program outputs the following::
-
- Hello, World! -> ['Hello', ',', 'World', '!']
-
-The Python representation of the grammar is quite readable, owing to the self-explanatory
-class names, and the use of '+', '|' and '^' operators.
-
-The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an
-object with named attributes.
-
-The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
- - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- - quoted strings
- - embedded comments
-"""
-
-__version__ = "2.0.6"
-__versionTime__ = "9 Nov 2015 19:03"
-__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
-
-import string
-from weakref import ref as wkref
-import copy
-import sys
-import warnings
-import re
-import sre_constants
-import collections
-import pprint
-import functools
-import itertools
-
-#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
-
-__all__ = [
-'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
-'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
-'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
-'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
-'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
-'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase',
-'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
-'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
-'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
-'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
-'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno',
-'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
-'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
-'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
-'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
-'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
-'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
-]
-
-PY_3 = sys.version.startswith('3')
-if PY_3:
- _MAX_INT = sys.maxsize
- basestring = str
- unichr = chr
- _ustr = str
-
- # build list of single arg builtins, that can be used as parse actions
- singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
-
-else:
- _MAX_INT = sys.maxint
- range = xrange
-
- def _ustr(obj):
- """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
- str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
- then < returns the unicode object | encodes it with the default encoding | ... >.
- """
- if isinstance(obj,unicode):
- return obj
-
- try:
- # If this works, then _ustr(obj) has the same behaviour as str(obj), so
- # it won't break any existing code.
- return str(obj)
-
- except UnicodeEncodeError:
- # The Python docs (http://docs.python.org/ref/customization.html#l2h-182)
- # state that "The return value must be a string object". However, does a
- # unicode object (being a subclass of basestring) count as a "string
- # object"?
- # If so, then return a unicode object:
- return unicode(obj)
- # Else encode it... but how? There are many choices... :)
- # Replace unprintables with escape codes?
- #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors')
- # Replace unprintables with question marks?
- #return unicode(obj).encode(sys.getdefaultencoding(), 'replace')
- # ...
-
- # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
- singleArgBuiltins = []
- import __builtin__
- for fname in "sum len sorted reversed list tuple set any all min max".split():
- try:
- singleArgBuiltins.append(getattr(__builtin__,fname))
- except AttributeError:
- continue
-
-_generatorType = type((y for y in range(1)))
-
-def _xml_escape(data):
- """Escape &, <, >, ", ', etc. in a string of data."""
-
- # ampersand must be replaced first
- from_symbols = '&><"\''
- to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
- for from_,to_ in zip(from_symbols, to_symbols):
- data = data.replace(from_, to_)
- return data
-
-class _Constants(object):
- pass
-
-alphas = string.ascii_lowercase + string.ascii_uppercase
-nums = "0123456789"
-hexnums = nums + "ABCDEFabcdef"
-alphanums = alphas + nums
-_bslash = chr(92)
-printables = "".join(c for c in string.printable if c not in string.whitespace)
-
-class ParseBaseException(Exception):
- """base exception class for all parsing runtime exceptions"""
- # Performance tuning: we construct a *lot* of these, so keep this
- # constructor as small and fast as possible
- def __init__( self, pstr, loc=0, msg=None, elem=None ):
- self.loc = loc
- if msg is None:
- self.msg = pstr
- self.pstr = ""
- else:
- self.msg = msg
- self.pstr = pstr
- self.parserElement = elem
-
- def __getattr__( self, aname ):
- """supported attributes by name are:
- - lineno - returns the line number of the exception text
- - col - returns the column number of the exception text
- - line - returns the line containing the exception text
- """
- if( aname == "lineno" ):
- return lineno( self.loc, self.pstr )
- elif( aname in ("col", "column") ):
- return col( self.loc, self.pstr )
- elif( aname == "line" ):
- return line( self.loc, self.pstr )
- else:
- raise AttributeError(aname)
-
- def __str__( self ):
- return "%s (at char %d), (line:%d, col:%d)" % \
- ( self.msg, self.loc, self.lineno, self.column )
- def __repr__( self ):
- return _ustr(self)
- def markInputline( self, markerString = ">!<" ):
- """Extracts the exception line from the input string, and marks
- the location of the exception with a special symbol.
- """
- line_str = self.line
- line_column = self.column - 1
- if markerString:
- line_str = "".join((line_str[:line_column],
- markerString, line_str[line_column:]))
- return line_str.strip()
- def __dir__(self):
- return "loc msg pstr parserElement lineno col line " \
- "markInputline __str__ __repr__".split()
-
-class ParseException(ParseBaseException):
- """exception thrown when parse expressions don't match class;
- supported attributes by name are:
- - lineno - returns the line number of the exception text
- - col - returns the column number of the exception text
- - line - returns the line containing the exception text
- """
- pass
-
-class ParseFatalException(ParseBaseException):
- """user-throwable exception thrown when inconsistent parse content
- is found; stops all parsing immediately"""
- pass
-
-class ParseSyntaxException(ParseFatalException):
- """just like C{L{ParseFatalException}}, but thrown internally when an
- C{L{ErrorStop<And._ErrorStop>}} ('-' operator) indicates that parsing is to stop immediately because
- an unbacktrackable syntax error has been found"""
- def __init__(self, pe):
- super(ParseSyntaxException, self).__init__(
- pe.pstr, pe.loc, pe.msg, pe.parserElement)
-
-#~ class ReparseException(ParseBaseException):
- #~ """Experimental class - parse actions can raise this exception to cause
- #~ pyparsing to reparse the input string:
- #~ - with a modified input string, and/or
- #~ - with a modified start location
- #~ Set the values of the ReparseException in the constructor, and raise the
- #~ exception in a parse action to cause pyparsing to use the new string/location.
- #~ Setting the values as None causes no change to be made.
- #~ """
- #~ def __init_( self, newstring, restartLoc ):
- #~ self.newParseText = newstring
- #~ self.reparseLoc = restartLoc
-
-class RecursiveGrammarException(Exception):
- """exception thrown by C{validate()} if the grammar could be improperly recursive"""
- def __init__( self, parseElementList ):
- self.parseElementTrace = parseElementList
-
- def __str__( self ):
- return "RecursiveGrammarException: %s" % self.parseElementTrace
-
-class _ParseResultsWithOffset(object):
- def __init__(self,p1,p2):
- self.tup = (p1,p2)
- def __getitem__(self,i):
- return self.tup[i]
- def __repr__(self):
- return repr(self.tup)
- def setOffset(self,i):
- self.tup = (self.tup[0],i)
-
-class ParseResults(object):
- """Structured parse results, to provide multiple means of access to the parsed data:
- - as a list (C{len(results)})
- - by list index (C{results[0], results[1]}, etc.)
- - by attribute (C{results.<resultsName>})
- """
- def __new__(cls, toklist, name=None, asList=True, modal=True ):
- if isinstance(toklist, cls):
- return toklist
- retobj = object.__new__(cls)
- retobj.__doinit = True
- return retobj
-
- # Performance tuning: we construct a *lot* of these, so keep this
- # constructor as small and fast as possible
- def __init__( self, toklist, name=None, asList=True, modal=True, isinstance=isinstance ):
- if self.__doinit:
- self.__doinit = False
- self.__name = None
- self.__parent = None
- self.__accumNames = {}
- if isinstance(toklist, list):
- self.__toklist = toklist[:]
- elif isinstance(toklist, _generatorType):
- self.__toklist = list(toklist)
- else:
- self.__toklist = [toklist]
- self.__tokdict = dict()
-
- if name is not None and name:
- if not modal:
- self.__accumNames[name] = 0
- if isinstance(name,int):
- name = _ustr(name) # will always return a str, but use _ustr for consistency
- self.__name = name
- if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
- if isinstance(toklist,basestring):
- toklist = [ toklist ]
- if asList:
- if isinstance(toklist,ParseResults):
- self[name] = _ParseResultsWithOffset(toklist.copy(),0)
- else:
- self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
- self[name].__name = name
- else:
- try:
- self[name] = toklist[0]
- except (KeyError,TypeError,IndexError):
- self[name] = toklist
-
- def __getitem__( self, i ):
- if isinstance( i, (int,slice) ):
- return self.__toklist[i]
- else:
- if i not in self.__accumNames:
- return self.__tokdict[i][-1][0]
- else:
- return ParseResults([ v[0] for v in self.__tokdict[i] ])
-
- def __setitem__( self, k, v, isinstance=isinstance ):
- if isinstance(v,_ParseResultsWithOffset):
- self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
- sub = v[0]
- elif isinstance(k,int):
- self.__toklist[k] = v
- sub = v
- else:
- self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
- sub = v
- if isinstance(sub,ParseResults):
- sub.__parent = wkref(self)
-
- def __delitem__( self, i ):
- if isinstance(i,(int,slice)):
- mylen = len( self.__toklist )
- del self.__toklist[i]
-
- # convert int to slice
- if isinstance(i, int):
- if i < 0:
- i += mylen
- i = slice(i, i+1)
- # get removed indices
- removed = list(range(*i.indices(mylen)))
- removed.reverse()
- # fixup indices in token dictionary
- #~ for name in self.__tokdict:
- #~ occurrences = self.__tokdict[name]
- #~ for j in removed:
- #~ for k, (value, position) in enumerate(occurrences):
- #~ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
- for name,occurrences in self.__tokdict.items():
- for j in removed:
- for k, (value, position) in enumerate(occurrences):
- occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
- else:
- del self.__tokdict[i]
-
- def __contains__( self, k ):
- return k in self.__tokdict
-
- def __len__( self ): return len( self.__toklist )
- def __bool__(self): return len( self.__toklist ) > 0
- __nonzero__ = __bool__
- def __iter__( self ): return iter( self.__toklist )
- def __reversed__( self ): return iter( self.__toklist[::-1] )
- def iterkeys( self ):
- """Returns all named result keys."""
- if hasattr(self.__tokdict, "iterkeys"):
- return self.__tokdict.iterkeys()
- else:
- return iter(self.__tokdict)
-
- def itervalues( self ):
- """Returns all named result values."""
- return (self[k] for k in self.iterkeys())
-
- def iteritems( self ):
- return ((k, self[k]) for k in self.iterkeys())
-
- if PY_3:
- keys = iterkeys
- values = itervalues
- items = iteritems
- else:
- def keys( self ):
- """Returns all named result keys."""
- return list(self.iterkeys())
-
- def values( self ):
- """Returns all named result values."""
- return list(self.itervalues())
-
- def items( self ):
- """Returns all named result keys and values as a list of tuples."""
- return list(self.iteritems())
-
- def haskeys( self ):
- """Since keys() returns an iterator, this method is helpful in bypassing
- code that looks for the existence of any defined results names."""
- return bool(self.__tokdict)
-
- def pop( self, *args, **kwargs):
- """Removes and returns item at specified index (default=last).
- Supports both list and dict semantics for pop(). If passed no
- argument or an integer argument, it will use list semantics
- and pop tokens from the list of parsed tokens. If passed a
- non-integer argument (most likely a string), it will use dict
- semantics and pop the corresponding value from any defined
- results names. A second default return value argument is
- supported, just as in dict.pop()."""
- if not args:
- args = [-1]
- for k,v in kwargs.items():
- if k == 'default':
- args = (args[0], v)
- else:
- raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
- if (isinstance(args[0], int) or
- len(args) == 1 or
- args[0] in self):
- index = args[0]
- ret = self[index]
- del self[index]
- return ret
- else:
- defaultvalue = args[1]
- return defaultvalue
-
- def get(self, key, defaultValue=None):
- """Returns named result matching the given key, or if there is no
- such name, then returns the given C{defaultValue} or C{None} if no
- C{defaultValue} is specified."""
- if key in self:
- return self[key]
- else:
- return defaultValue
-
- def insert( self, index, insStr ):
- """Inserts new element at location index in the list of parsed tokens."""
- self.__toklist.insert(index, insStr)
- # fixup indices in token dictionary
- #~ for name in self.__tokdict:
- #~ occurrences = self.__tokdict[name]
- #~ for k, (value, position) in enumerate(occurrences):
- #~ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
- for name,occurrences in self.__tokdict.items():
- for k, (value, position) in enumerate(occurrences):
- occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
-
- def append( self, item ):
- """Add single element to end of ParseResults list of elements."""
- self.__toklist.append(item)
-
- def extend( self, itemseq ):
- """Add sequence of elements to end of ParseResults list of elements."""
- if isinstance(itemseq, ParseResults):
- self += itemseq
- else:
- self.__toklist.extend(itemseq)
-
- def clear( self ):
- """Clear all elements and results names."""
- del self.__toklist[:]
- self.__tokdict.clear()
-
- def __getattr__( self, name ):
- try:
- return self[name]
- except KeyError:
- return ""
-
- if name in self.__tokdict:
- if name not in self.__accumNames:
- return self.__tokdict[name][-1][0]
- else:
- return ParseResults([ v[0] for v in self.__tokdict[name] ])
- else:
- return ""
-
- def __add__( self, other ):
- ret = self.copy()
- ret += other
- return ret
-
- def __iadd__( self, other ):
- if other.__tokdict:
- offset = len(self.__toklist)
- addoffset = lambda a: offset if a<0 else a+offset
- otheritems = other.__tokdict.items()
- otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
- for (k,vlist) in otheritems for v in vlist]
- for k,v in otherdictitems:
- self[k] = v
- if isinstance(v[0],ParseResults):
- v[0].__parent = wkref(self)
-
- self.__toklist += other.__toklist
- self.__accumNames.update( other.__accumNames )
- return self
-
- def __radd__(self, other):
- if isinstance(other,int) and other == 0:
- return self.copy()
-
- def __repr__( self ):
- return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
-
- def __str__( self ):
- return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
-
- def _asStringList( self, sep='' ):
- out = []
- for item in self.__toklist:
- if out and sep:
- out.append(sep)
- if isinstance( item, ParseResults ):
- out += item._asStringList()
- else:
- out.append( _ustr(item) )
- return out
-
- def asList( self ):
- """Returns the parse results as a nested list of matching tokens, all converted to strings."""
- return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
-
- def asDict( self ):
- """Returns the named parse results as dictionary."""
- if PY_3:
- return dict( self.items() )
- else:
- return dict( self.iteritems() )
-
- def copy( self ):
- """Returns a new copy of a C{ParseResults} object."""
- ret = ParseResults( self.__toklist )
- ret.__tokdict = self.__tokdict.copy()
- ret.__parent = self.__parent
- ret.__accumNames.update( self.__accumNames )
- ret.__name = self.__name
- return ret
-
- def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
- """Returns the parse results as XML. Tags are created for tokens and lists that have defined results names."""
- nl = "\n"
- out = []
- namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
- for v in vlist)
- nextLevelIndent = indent + " "
-
- # collapse out indents if formatting is not desired
- if not formatted:
- indent = ""
- nextLevelIndent = ""
- nl = ""
-
- selfTag = None
- if doctag is not None:
- selfTag = doctag
- else:
- if self.__name:
- selfTag = self.__name
-
- if not selfTag:
- if namedItemsOnly:
- return ""
- else:
- selfTag = "ITEM"
-
- out += [ nl, indent, "<", selfTag, ">" ]
-
- for i,res in enumerate(self.__toklist):
- if isinstance(res,ParseResults):
- if i in namedItems:
- out += [ res.asXML(namedItems[i],
- namedItemsOnly and doctag is None,
- nextLevelIndent,
- formatted)]
- else:
- out += [ res.asXML(None,
- namedItemsOnly and doctag is None,
- nextLevelIndent,
- formatted)]
- else:
- # individual token, see if there is a name for it
- resTag = None
- if i in namedItems:
- resTag = namedItems[i]
- if not resTag:
- if namedItemsOnly:
- continue
- else:
- resTag = "ITEM"
- xmlBodyText = _xml_escape(_ustr(res))
- out += [ nl, nextLevelIndent, "<", resTag, ">",
- xmlBodyText,
- "</", resTag, ">" ]
-
- out += [ nl, indent, "</", selfTag, ">" ]
- return "".join(out)
-
- def __lookup(self,sub):
- for k,vlist in self.__tokdict.items():
- for v,loc in vlist:
- if sub is v:
- return k
- return None
-
- def getName(self):
- """Returns the results name for this token expression."""
- if self.__name:
- return self.__name
- elif self.__parent:
- par = self.__parent()
- if par:
- return par.__lookup(self)
- else:
- return None
- elif (len(self) == 1 and
- len(self.__tokdict) == 1 and
- self.__tokdict.values()[0][0][1] in (0,-1)):
- return self.__tokdict.keys()[0]
- else:
- return None
-
- def dump(self,indent='',depth=0):
- """Diagnostic method for listing out the contents of a C{ParseResults}.
- Accepts an optional C{indent} argument so that this string can be embedded
- in a nested display of other data."""
- out = []
- NL = '\n'
- out.append( indent+_ustr(self.asList()) )
- if self.haskeys():
- items = sorted(self.items())
- for k,v in items:
- if out:
- out.append(NL)
- out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
- if isinstance(v,ParseResults):
- if v:
- out.append( v.dump(indent,depth+1) )
- else:
- out.append(_ustr(v))
- else:
- out.append(_ustr(v))
- elif any(isinstance(vv,ParseResults) for vv in self):
- v = self
- for i,vv in enumerate(v):
- if isinstance(vv,ParseResults):
- out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
- else:
- out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
-
- return "".join(out)
-
- def pprint(self, *args, **kwargs):
- """Pretty-printer for parsed results as a list, using the C{pprint} module.
- Accepts additional positional or keyword args as defined for the
- C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})"""
- pprint.pprint(self.asList(), *args, **kwargs)
-
- # add support for pickle protocol
- def __getstate__(self):
- return ( self.__toklist,
- ( self.__tokdict.copy(),
- self.__parent is not None and self.__parent() or None,
- self.__accumNames,
- self.__name ) )
-
- def __setstate__(self,state):
- self.__toklist = state[0]
- (self.__tokdict,
- par,
- inAccumNames,
- self.__name) = state[1]
- self.__accumNames = {}
- self.__accumNames.update(inAccumNames)
- if par is not None:
- self.__parent = wkref(par)
- else:
- self.__parent = None
-
- def __dir__(self):
- return dir(super(ParseResults,self)) + list(self.keys())
-
-collections.MutableMapping.register(ParseResults)
-
-def col (loc,strg):
- """Returns current column within a string, counting newlines as line separators.
- The first column is number 1.
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
- """
- s = strg
- return 1 if loc<len(s) and s[loc] == '\n' else loc - s.rfind("\n", 0, loc)
-
-def lineno(loc,strg):
- """Returns current line number within a string, counting newlines as line separators.
- The first line is number 1.
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
- """
- return strg.count("\n",0,loc) + 1
-
-def line( loc, strg ):
- """Returns the line of text containing loc within a string, counting newlines as line separators.
- """
- lastCR = strg.rfind("\n", 0, loc)
- nextCR = strg.find("\n", loc)
- if nextCR >= 0:
- return strg[lastCR+1:nextCR]
- else:
- return strg[lastCR+1:]
-
-def _defaultStartDebugAction( instring, loc, expr ):
- print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
-
-def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
- print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
-
-def _defaultExceptionDebugAction( instring, loc, expr, exc ):
- print ("Exception raised:" + _ustr(exc))
-
-def nullDebugAction(*args):
- """'Do-nothing' debug action, to suppress debugging output during parsing."""
- pass
-
-# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
-#~ 'decorator to trim function calls to match the arity of the target'
-#~ def _trim_arity(func, maxargs=3):
- #~ if func in singleArgBuiltins:
- #~ return lambda s,l,t: func(t)
- #~ limit = 0
- #~ foundArity = False
- #~ def wrapper(*args):
- #~ nonlocal limit,foundArity
- #~ while 1:
- #~ try:
- #~ ret = func(*args[limit:])
- #~ foundArity = True
- #~ return ret
- #~ except TypeError:
- #~ if limit == maxargs or foundArity:
- #~ raise
- #~ limit += 1
- #~ continue
- #~ return wrapper
-
-# this version is Python 2.x-3.x cross-compatible
-'decorator to trim function calls to match the arity of the target'
-def _trim_arity(func, maxargs=2):
- if func in singleArgBuiltins:
- return lambda s,l,t: func(t)
- limit = [0]
- foundArity = [False]
- def wrapper(*args):
- while 1:
- try:
- ret = func(*args[limit[0]:])
- foundArity[0] = True
- return ret
- except TypeError:
- if limit[0] <= maxargs and not foundArity[0]:
- limit[0] += 1
- continue
- raise
- return wrapper
-
-class ParserElement(object):
- """Abstract base level parser element class."""
- DEFAULT_WHITE_CHARS = " \n\t\r"
- verbose_stacktrace = False
-
- @staticmethod
- def setDefaultWhitespaceChars( chars ):
- """Overrides the default whitespace chars
- """
- ParserElement.DEFAULT_WHITE_CHARS = chars
-
- @staticmethod
- def inlineLiteralsUsing(cls):
- """
- Set class to be used for inclusion of string literals into a parser.
- """
- ParserElement.literalStringClass = cls
-
- def __init__( self, savelist=False ):
- self.parseAction = list()
- self.failAction = None
- #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall
- self.strRepr = None
- self.resultsName = None
- self.saveAsList = savelist
- self.skipWhitespace = True
- self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
- self.copyDefaultWhiteChars = True
- self.mayReturnEmpty = False # used when checking for left-recursion
- self.keepTabs = False
- self.ignoreExprs = list()
- self.debug = False
- self.streamlined = False
- self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
- self.errmsg = ""
- self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
- self.debugActions = ( None, None, None ) #custom debug actions
- self.re = None
- self.callPreparse = True # used to avoid redundant calls to preParse
- self.callDuringTry = False
-
- def copy( self ):
- """Make a copy of this C{ParserElement}. Useful for defining different parse actions
- for the same parsing pattern, using copies of the original parse element."""
- cpy = copy.copy( self )
- cpy.parseAction = self.parseAction[:]
- cpy.ignoreExprs = self.ignoreExprs[:]
- if self.copyDefaultWhiteChars:
- cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
- return cpy
-
- def setName( self, name ):
- """Define name for this expression, for use in debugging."""
- self.name = name
- self.errmsg = "Expected " + self.name
- if hasattr(self,"exception"):
- self.exception.msg = self.errmsg
- return self
-
- def setResultsName( self, name, listAllMatches=False ):
- """Define name for referencing matching tokens as a nested attribute
- of the returned parse results.
- NOTE: this returns a *copy* of the original C{ParserElement} object;
- this is so that the client can define a basic element, such as an
- integer, and reference it in multiple places with different names.
-
- You can also set results names using the abbreviated syntax,
- C{expr("name")} in place of C{expr.setResultsName("name")} -
- see L{I{__call__}<__call__>}.
- """
- newself = self.copy()
- if name.endswith("*"):
- name = name[:-1]
- listAllMatches=True
- newself.resultsName = name
- newself.modalResults = not listAllMatches
- return newself
-
- def setBreak(self,breakFlag = True):
- """Method to invoke the Python pdb debugger when this element is
- about to be parsed. Set C{breakFlag} to True to enable, False to
- disable.
- """
- if breakFlag:
- _parseMethod = self._parse
- def breaker(instring, loc, doActions=True, callPreParse=True):
- import pdb
- pdb.set_trace()
- return _parseMethod( instring, loc, doActions, callPreParse )
- breaker._originalParseMethod = _parseMethod
- self._parse = breaker
- else:
- if hasattr(self._parse,"_originalParseMethod"):
- self._parse = self._parse._originalParseMethod
- return self
-
- def setParseAction( self, *fns, **kwargs ):
- """Define action to perform when successfully matching parse element definition.
- Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
- C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
- - s = the original string being parsed (see note below)
- - loc = the location of the matching substring
- - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
- If the functions in fns modify the tokens, they can return them as the return
- value from fn, and the modified list of tokens will replace the original.
- Otherwise, fn does not need to return any value.
-
- Note: the default parsing behavior is to expand tabs in the input string
- before starting the parsing process. See L{I{parseString}<parseString>} for more information
- on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
- consistent view of the parsed string, the parse location, and line and column
- positions within the parsed string.
- """
- self.parseAction = list(map(_trim_arity, list(fns)))
- self.callDuringTry = kwargs.get("callDuringTry", False)
- return self
-
- def addParseAction( self, *fns, **kwargs ):
- """Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}."""
- self.parseAction += list(map(_trim_arity, list(fns)))
- self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
- return self
-
- def addCondition(self, *fns, **kwargs):
- """Add a boolean predicate function to expression's list of parse actions. See
- L{I{setParseAction}<setParseAction>}. Optional keyword argument C{message} can
- be used to define a custom message to be used in the raised exception."""
- msg = kwargs.get("message") or "failed user-defined condition"
- for fn in fns:
- def pa(s,l,t):
- if not bool(_trim_arity(fn)(s,l,t)):
- raise ParseException(s,l,msg)
- return t
- self.parseAction.append(pa)
- self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
- return self
-
- def setFailAction( self, fn ):
- """Define action to perform if parsing fails at this expression.
- Fail acton fn is a callable function that takes the arguments
- C{fn(s,loc,expr,err)} where:
- - s = string being parsed
- - loc = location where expression match was attempted and failed
- - expr = the parse expression that failed
- - err = the exception thrown
- The function returns no value. It may throw C{L{ParseFatalException}}
- if it is desired to stop parsing immediately."""
- self.failAction = fn
- return self
-
- def _skipIgnorables( self, instring, loc ):
- exprsFound = True
- while exprsFound:
- exprsFound = False
- for e in self.ignoreExprs:
- try:
- while 1:
- loc,dummy = e._parse( instring, loc )
- exprsFound = True
- except ParseException:
- pass
- return loc
-
- def preParse( self, instring, loc ):
- if self.ignoreExprs:
- loc = self._skipIgnorables( instring, loc )
-
- if self.skipWhitespace:
- wt = self.whiteChars
- instrlen = len(instring)
- while loc < instrlen and instring[loc] in wt:
- loc += 1
-
- return loc
-
- def parseImpl( self, instring, loc, doActions=True ):
- return loc, []
-
- def postParse( self, instring, loc, tokenlist ):
- return tokenlist
-
    #~ @profile
    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
        """Core (uncached) parse driver: run debug/fail hooks, preParse,
        parseImpl, postParse, then any registered parse actions.

        Returns (new location, ParseResults); raises ParseException (or a
        subclass) on match failure.
        """
        debugging = ( self.debug ) #and doActions )

        if debugging or self.failAction:
            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
            if (self.debugActions[0] ):
                self.debugActions[0]( instring, loc, self )
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            try:
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    # ran off the end of instring -- report as a normal parse failure
                    raise ParseException( instring, len(instring), self.errmsg, self )
            except ParseBaseException as err:
                #~ print ("Exception raised:", err)
                if self.debugActions[2]:
                    self.debugActions[2]( instring, tokensStart, self, err )
                if self.failAction:
                    self.failAction( instring, tokensStart, self, err )
                raise
        else:
            # non-debug path: same sequence, but only wrap parseImpl in
            # try/except when an IndexError is actually possible
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            if self.mayIndexError or loc >= len(instring):
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    raise ParseException( instring, len(instring), self.errmsg, self )
            else:
                loc,tokens = self.parseImpl( instring, preloc, doActions )

        tokens = self.postParse( instring, loc, tokens )

        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
        if self.parseAction and (doActions or self.callDuringTry):
            if debugging:
                try:
                    for fn in self.parseAction:
                        tokens = fn( instring, tokensStart, retTokens )
                        if tokens is not None:
                            # a parse action may return a replacement token list
                            retTokens = ParseResults( tokens,
                                                      self.resultsName,
                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                      modal=self.modalResults )
                except ParseBaseException as err:
                    #~ print "Exception raised in user parse action:", err
                    if (self.debugActions[2] ):
                        self.debugActions[2]( instring, tokensStart, self, err )
                    raise
            else:
                for fn in self.parseAction:
                    tokens = fn( instring, tokensStart, retTokens )
                    if tokens is not None:
                        retTokens = ParseResults( tokens,
                                                  self.resultsName,
                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                  modal=self.modalResults )

        if debugging:
            #~ print ("Matched",self,"->",retTokens.asList())
            if (self.debugActions[1] ):
                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )

        return loc, retTokens
-
- def tryParse( self, instring, loc ):
- try:
- return self._parse( instring, loc, doActions=False )[0]
- except ParseFatalException:
- raise ParseException( instring, loc, self.errmsg, self)
-
    # this method gets repeatedly called during backtracking with the same arguments -
    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
        """Memoized ("packrat") variant of _parseNoCache.  Both successful
        results and raised parse exceptions are cached in
        ParserElement._exprArgCache, keyed by the full argument tuple.
        """
        lookup = (self,instring,loc,callPreParse,doActions)
        if lookup in ParserElement._exprArgCache:
            value = ParserElement._exprArgCache[ lookup ]
            if isinstance(value, Exception):
                # cached failure: re-raise the same exception instance
                raise value
            # return a copy so callers cannot mutate the cached ParseResults
            return (value[0],value[1].copy())
        else:
            try:
                value = self._parseNoCache( instring, loc, doActions, callPreParse )
                ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy())
                return value
            except ParseBaseException as pe:
                # drop the traceback so cached exceptions don't retain frames
                pe.__traceback__ = None
                ParserElement._exprArgCache[ lookup ] = pe
                raise
-
    # Default parse entry point is the uncached driver; enablePackrat()
    # rebinds this class attribute to _parseCache.
    _parse = _parseNoCache

    # argument cache for optimizing repeated calls when backtracking through recursive expressions
    # maps (element, instring, loc, callPreParse, doActions) -> (loc, results) or exception
    _exprArgCache = {}
    @staticmethod
    def resetCache():
        # Discard all memoized packrat results (cache is shared class-wide).
        ParserElement._exprArgCache.clear()
-
    # True once enablePackrat() has swapped _parse over to the caching version
    _packratEnabled = False
    @staticmethod
    def enablePackrat():
        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
        Repeated parse attempts at the same string location (which happens
        often in many complex grammars) can immediately return a cached value,
        instead of re-executing parsing/validating code. Memoizing is done of
        both valid results and parsing exceptions.

        This speedup may break existing programs that use parse actions that
        have side-effects. For this reason, packrat parsing is disabled when
        you first import pyparsing. To activate the packrat feature, your
        program must call the class method C{ParserElement.enablePackrat()}. If
        your program uses C{psyco} to "compile as you go", you must call
        C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
        Python will crash. For best results, call C{enablePackrat()} immediately
        after importing pyparsing.
        """
        if not ParserElement._packratEnabled:
            ParserElement._packratEnabled = True
            # rebind the class-level parse entry point to the memoizing version
            ParserElement._parse = ParserElement._parseCache
-
    def parseString( self, instring, parseAll=False ):
        """Execute the parse expression with the given string.
        This is the main interface to the client code, once the complete
        expression has been built.

        If you want the grammar to require that the entire input string be
        successfully parsed, then set C{parseAll} to True (equivalent to ending
        the grammar with C{L{StringEnd()}}).

        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
        in order to report proper column numbers in parse actions.
        If the input string contains tabs and
        the grammar uses parse actions that use the C{loc} argument to index into the
        string being parsed, you can ensure you have a consistent view of the input
        string by:
         - calling C{parseWithTabs} on your grammar before calling C{parseString}
           (see L{I{parseWithTabs}<parseWithTabs>})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explictly expand the tabs in your input string before calling
           C{parseString}
        """
        # start every top-level parse with an empty packrat cache
        ParserElement.resetCache()
        if not self.streamlined:
            self.streamline()
            #~ self.saveAsList = True
        for e in self.ignoreExprs:
            e.streamline()
        if not self.keepTabs:
            instring = instring.expandtabs()
        try:
            loc, tokens = self._parse( instring, 0 )
            if parseAll:
                # require that only whitespace/ignorables remain, then end-of-string
                loc = self.preParse( instring, loc )
                se = Empty() + StringEnd()
                se._parse( instring, loc )
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
        else:
            return tokens
-
    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
        """Scan the input string for expression matches. Each match will return the
        matching tokens, start location, and end location. May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found. If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed. See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs."""
        if not self.streamlined:
            self.streamline()
        for e in self.ignoreExprs:
            e.streamline()

        if not self.keepTabs:
            instring = _ustr(instring).expandtabs()
        instrlen = len(instring)
        loc = 0
        # hoist bound-method lookups out of the scanning loop
        preparseFn = self.preParse
        parseFn = self._parse
        ParserElement.resetCache()
        matches = 0
        try:
            while loc <= instrlen and matches < maxMatches:
                try:
                    preloc = preparseFn( instring, loc )
                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
                except ParseException:
                    # no match here; resume one character past the preparse point
                    loc = preloc+1
                else:
                    if nextLoc > loc:
                        matches += 1
                        yield tokens, preloc, nextLoc
                        if overlap:
                            # NOTE(review): `nextloc` (lower case) is computed here
                            # but `nextLoc` (the match end) is assigned below --
                            # looks like a casing slip; confirm against upstream
                            # pyparsing before changing.
                            nextloc = preparseFn( instring, loc )
                            if nextloc > loc:
                                loc = nextLoc
                            else:
                                loc += 1
                        else:
                            loc = nextLoc
                    else:
                        # zero-width match: advance one char to avoid looping forever
                        loc = preloc+1
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
-
    def transformString( self, instring ):
        """Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action. To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action. C{transformString()} returns the resulting transformed string."""
        out = []
        lastE = 0
        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
        # keep string locs straight between transformString and scanString
        self.keepTabs = True
        try:
            for t,s,e in self.scanString( instring ):
                # copy the unmatched text between the previous match and this one
                out.append( instring[lastE:s] )
                if t:
                    # splice in the (possibly action-modified) tokens for the match
                    if isinstance(t,ParseResults):
                        out += t.asList()
                    elif isinstance(t,list):
                        out += t
                    else:
                        out.append(t)
                lastE = e
            # append the trailing unmatched text
            out.append(instring[lastE:])
            out = [o for o in out if o]
            return "".join(map(_ustr,_flatten(out)))
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
-
- def searchString( self, instring, maxMatches=_MAX_INT ):
- """Another extension to C{L{scanString}}, simplifying the access to the tokens found
- to match the given parse expression. May be called with optional
- C{maxMatches} argument, to clip searching after 'n' matches are found.
- """
- try:
- return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
- except ParseBaseException as exc:
- if ParserElement.verbose_stacktrace:
- raise
- else:
- # catch and re-raise exception from here, clears out pyparsing internal stack trace
- raise exc
-
- def __add__(self, other ):
- """Implementation of + operator - returns C{L{And}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return And( [ self, other ] )
-
- def __radd__(self, other ):
- """Implementation of + operator when left operand is not a C{L{ParserElement}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return other + self
-
- def __sub__(self, other):
- """Implementation of - operator, returns C{L{And}} with error stop"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return And( [ self, And._ErrorStop(), other ] )
-
- def __rsub__(self, other ):
- """Implementation of - operator when left operand is not a C{L{ParserElement}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return other - self
-
    def __mul__(self,other):
        """Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer
        tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
        may also include C{None} as in:
         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
              to C{expr*n + L{ZeroOrMore}(expr)}
              (read as "at least n instances of C{expr}")
         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
              (read as "0 to n instances of C{expr}")
         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

        Note that C{expr*(None,n)} does not raise an exception if
        more than n exprs exist in the input stream; that is,
        C{expr*(None,n)} does not enforce a maximum number of expr
        occurrences. If this behavior is desired, then write
        C{expr*(None,n) + ~expr}

        """
        if isinstance(other,int):
            minElements, optElements = other,0
        elif isinstance(other,tuple):
            # normalize to exactly two entries, None meaning "unspecified"
            other = (other + (None, None))[:2]
            if other[0] is None:
                other = (0, other[1])
            if isinstance(other[0],int) and other[1] is None:
                # open-ended repetition: (n, None)
                if other[0] == 0:
                    return ZeroOrMore(self)
                if other[0] == 1:
                    return OneOrMore(self)
                else:
                    return self*other[0] + ZeroOrMore(self)
            elif isinstance(other[0],int) and isinstance(other[1],int):
                # bounded repetition: a required minimum plus optional extras
                minElements, optElements = other
                optElements -= minElements
            else:
                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
        else:
            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))

        if minElements < 0:
            raise ValueError("cannot multiply ParserElement by negative value")
        if optElements < 0:
            raise ValueError("second tuple value must be greater or equal to first tuple value")
        if minElements == optElements == 0:
            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")

        if (optElements):
            def makeOptionalList(n):
                # build nested Optionals for the n optional trailing copies
                if n>1:
                    return Optional(self + makeOptionalList(n-1))
                else:
                    return Optional(self)
            if minElements:
                if minElements == 1:
                    ret = self + makeOptionalList(optElements)
                else:
                    ret = And([self]*minElements) + makeOptionalList(optElements)
            else:
                ret = makeOptionalList(optElements)
        else:
            if minElements == 1:
                ret = self
            else:
                ret = And([self]*minElements)
        return ret
-
- def __rmul__(self, other):
- return self.__mul__(other)
-
- def __or__(self, other ):
- """Implementation of | operator - returns C{L{MatchFirst}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return MatchFirst( [ self, other ] )
-
- def __ror__(self, other ):
- """Implementation of | operator when left operand is not a C{L{ParserElement}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return other | self
-
- def __xor__(self, other ):
- """Implementation of ^ operator - returns C{L{Or}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return Or( [ self, other ] )
-
- def __rxor__(self, other ):
- """Implementation of ^ operator when left operand is not a C{L{ParserElement}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return other ^ self
-
- def __and__(self, other ):
- """Implementation of & operator - returns C{L{Each}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return Each( [ self, other ] )
-
- def __rand__(self, other ):
- """Implementation of & operator when left operand is not a C{L{ParserElement}}"""
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- if not isinstance( other, ParserElement ):
- warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
- SyntaxWarning, stacklevel=2)
- return None
- return other & self
-
- def __invert__( self ):
- """Implementation of ~ operator - returns C{L{NotAny}}"""
- return NotAny( self )
-
- def __call__(self, name=None):
- """Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}::
- userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
- could be written as::
- userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
-
- If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
- passed as C{True}.
-
- If C{name} is omitted, same as calling C{L{copy}}.
- """
- if name is not None:
- return self.setResultsName(name)
- else:
- return self.copy()
-
- def suppress( self ):
- """Suppresses the output of this C{ParserElement}; useful to keep punctuation from
- cluttering up returned output.
- """
- return Suppress( self )
-
- def leaveWhitespace( self ):
- """Disables the skipping of whitespace before matching the characters in the
- C{ParserElement}'s defined pattern. This is normally only used internally by
- the pyparsing module, but may be needed in some whitespace-sensitive grammars.
- """
- self.skipWhitespace = False
- return self
-
- def setWhitespaceChars( self, chars ):
- """Overrides the default whitespace chars
- """
- self.skipWhitespace = True
- self.whiteChars = chars
- self.copyDefaultWhiteChars = False
- return self
-
- def parseWithTabs( self ):
- """Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
- Must be called before C{parseString} when the input grammar contains elements that
- match C{<TAB>} characters."""
- self.keepTabs = True
- return self
-
- def ignore( self, other ):
- """Define expression to be ignored (e.g., comments) while doing pattern
- matching; may be called repeatedly, to define multiple comment or other
- ignorable patterns.
- """
- if isinstance( other, Suppress ):
- if other not in self.ignoreExprs:
- self.ignoreExprs.append( other.copy() )
- else:
- self.ignoreExprs.append( Suppress( other.copy() ) )
- return self
-
- def setDebugActions( self, startAction, successAction, exceptionAction ):
- """Enable display of debugging messages while doing pattern matching."""
- self.debugActions = (startAction or _defaultStartDebugAction,
- successAction or _defaultSuccessDebugAction,
- exceptionAction or _defaultExceptionDebugAction)
- self.debug = True
- return self
-
- def setDebug( self, flag=True ):
- """Enable display of debugging messages while doing pattern matching.
- Set C{flag} to True to enable, False to disable."""
- if flag:
- self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
- else:
- self.debug = False
- return self
-
- def __str__( self ):
- return self.name
-
- def __repr__( self ):
- return _ustr(self)
-
- def streamline( self ):
- self.streamlined = True
- self.strRepr = None
- return self
-
- def checkRecursion( self, parseElementList ):
- pass
-
- def validate( self, validateTrace=[] ):
- """Check defined expressions for valid structure, check for infinite recursive definitions."""
- self.checkRecursion( [] )
-
- def parseFile( self, file_or_filename, parseAll=False ):
- """Execute the parse expression on the given file or filename.
- If a filename is specified (instead of a file object),
- the entire file is opened, read, and closed before parsing.
- """
- try:
- file_contents = file_or_filename.read()
- except AttributeError:
- f = open(file_or_filename, "r")
- file_contents = f.read()
- f.close()
- try:
- return self.parseString(file_contents, parseAll)
- except ParseBaseException as exc:
- if ParserElement.verbose_stacktrace:
- raise
- else:
- # catch and re-raise exception from here, clears out pyparsing internal stack trace
- raise exc
-
    def __eq__(self,other):
        # Another ParserElement is equal if identical or if every attribute
        # matches; a string is "equal" if this element parses it completely.
        if isinstance(other, ParserElement):
            return self is other or self.__dict__ == other.__dict__
        elif isinstance(other, basestring):
            try:
                self.parseString(_ustr(other), parseAll=True)
                return True
            except ParseBaseException:
                return False
        else:
            # NOTE(review): this compares the bound super() proxy object itself
            # to C{other} (almost always False); it was presumably meant to
            # delegate to the superclass __eq__ -- confirm before relying on it.
            return super(ParserElement,self)==other
-
- def __ne__(self,other):
- return not (self == other)
-
- def __hash__(self):
- return hash(id(self))
-
- def __req__(self,other):
- return self == other
-
- def __rne__(self,other):
- return not (self == other)
-
    def runTests(self, tests, parseAll=False):
        """Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.

        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=False) - flag to pass to C{L{parseString}} when running tests
        """
        if isinstance(tests, basestring):
            # split a multiline string into individual stripped test cases
            tests = map(str.strip, tests.splitlines())
        for t in tests:
            out = [t]
            try:
                out.append(self.parseString(t, parseAll=parseAll).dump())
            except ParseException as pe:
                # on failure, point a caret at the failing column (preceded by
                # the offending line when the test spans multiple lines)
                if '\n' in t:
                    out.append(line(pe.loc, t))
                    out.append(' '*(col(pe.loc,t)-1) + '^')
                else:
                    out.append(' '*pe.loc + '^')
                out.append(str(pe))
            out.append('')
            print('\n'.join(out))
-
-
class Token(ParserElement):
    """Abstract base class for atomic matching patterns (terminals).

    Construction simply disables list-saving on the underlying
    C{ParserElement}; concrete subclasses supply C{parseImpl}.
    """

    def __init__(self):
        super(Token, self).__init__(savelist=False)
-
-
class Empty(Token):
    """A token that always matches, consuming no input."""

    def __init__(self):
        super(Empty, self).__init__()
        self.name = "Empty"
        # matching the empty string can neither fail nor index past the end
        self.mayReturnEmpty = True
        self.mayIndexError = False
-
-
class NoMatch(Token):
    """A token that never matches; every parse attempt raises C{ParseException}."""

    def __init__(self):
        super(NoMatch, self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl(self, instring, loc, doActions=True):
        raise ParseException(instring, loc, self.errmsg, self)
-
-
class Literal(Token):
    """Token to exactly match a specified string."""
    def __init__( self, matchString ):
        super(Literal,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            # empty match string: warn and degrade this instance to Empty
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    #~ @profile
    def parseImpl( self, instring, loc, doActions=True ):
        if (instring[loc] == self.firstMatchChar and
            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
# short alias kept for backward compatibility
_L = Literal
# class used when a plain string is combined with a ParserElement
ParserElement.literalStringClass = Literal
-
class Keyword(Token):
    """Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character. Compare with C{L{Literal}}::
      Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}.
      Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
    C{identChars} is a string of characters that would be valid identifier characters,
    defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive
    matching, default is C{False}.
    """
    # default set of characters treated as identifier characters
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ):
        super(Keyword,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # pre-fold the keyword and identifier chars once for caseless compares
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        if self.caseless:
            # match the folded text, then require a non-identifier character
            # (or string boundary) on both sides of the keyword
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        # copies revert to the class-default keyword characters
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars
-
class CaselessLiteral(Literal):
    """Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.
    """
    def __init__( self, matchString ):
        # store the upper-cased form as self.match for comparisons
        super(CaselessLiteral,self).__init__( matchString.upper() )
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl( self, instring, loc, doActions=True ):
        # compare the upper-cased slice against the already-folded match,
        # but return the literal as originally supplied
        if instring[ loc:loc+self.matchLen ].upper() == self.match:
            return loc+self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)
-
class CaselessKeyword(Keyword):
    """Case-insensitive variant of C{L{Keyword}}; the originally supplied
    keyword string is returned regardless of input case."""
    def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ):
        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )

    def parseImpl( self, instring, loc, doActions=True ):
        # NOTE(review): unlike Keyword.parseImpl, this does not also reject a
        # keyword character immediately *before* loc -- confirm whether that
        # asymmetry is intentional before relying on it.
        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
-
class Word(Token):
    """Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length. The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{exclude} parameter can list characters that might be found in
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.
    """
    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
        super(Word,self).__init__()
        if excludeChars:
            # drop excluded characters from both character sets up front
            initChars = ''.join(c for c in initChars if c not in excludeChars)
            if bodyChars:
                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars :
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            # body characters default to the initial character set
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min

        if max > 0:
            self.maxLen = max
        else:
            self.maxLen = _MAX_INT

        if exact > 0:
            # an exact length overrides both min and max
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # when no length limits are given and space is not a word character,
        # the match can be compiled down to a single regular expression
        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % \
                                      (re.escape(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % \
                                      (_escapeRegexRangeChars(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b"+self.reString+r"\b"
            try:
                self.re = re.compile( self.reString )
            # NOTE(review): bare except silently falls back to the scanning
            # implementation on *any* failure, not just re.error -- consider
            # narrowing to `except Exception`.
            except:
                self.re = None

    def parseImpl( self, instring, loc, doActions=True ):
        # fast path: use the precompiled regex when one was built in __init__
        if self.re:
            result = self.re.match(instring,loc)
            if not result:
                raise ParseException(instring, loc, self.errmsg, self)

            loc = result.end()
            return loc, result.group()

        if not(instring[ loc ] in self.initChars):
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = start + self.maxLen
        maxloc = min( maxloc, instrlen )
        # consume body characters up to the maximum allowed length
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            # more body characters follow than the declared maximum permits
            throwException = True
        if self.asKeyword:
            # keyword mode: reject the match if flanked by body characters
            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__( self ):
        try:
            return super(Word,self).__str__()
        # NOTE(review): bare except -- presumably guarding an unset self.name;
        # consider narrowing to `except Exception`.
        except:
            pass


        if self.strRepr is None:

            def charsAsStr(s):
                # abbreviate long character sets for display
                if len(s)>4:
                    return s[:4]+"..."
                else:
                    return s

            if ( self.initCharsOrig != self.bodyCharsOrig ):
                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr
-
-
class Regex(Token):
    """Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    """
    # type object of a compiled pattern, used to detect precompiled-RE arguments
    compiledREtype = type(re.compile("[A-Z]"))
    def __init__( self, pattern, flags=0):
        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
        super(Regex,self).__init__()

        if isinstance(pattern, basestring):
            if len(pattern) == 0:
                warnings.warn("null string passed to Regex; use Empty() instead",
                        SyntaxWarning, stacklevel=2)

            self.pattern = pattern
            self.flags = flags

            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                # warn, then let the original compile error propagate
                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                    SyntaxWarning, stacklevel=2)
                raise

        elif isinstance(pattern, Regex.compiledREtype):
            # accept an already-compiled pattern object as-is
            self.re = pattern
            self.pattern = \
                self.reString = str(pattern)
            self.flags = flags

        else:
            raise ValueError("Regex may only be constructed with a string or a compiled RE object")

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        result = self.re.match(instring,loc)
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        # expose any named groups as named results on the returned tokens
        d = result.groupdict()
        ret = ParseResults(result.group())
        if d:
            for k in d:
                ret[k] = d[k]
        return loc,ret

    def __str__( self ):
        try:
            return super(Regex,self).__str__()
        # NOTE(review): bare except -- consider narrowing to `except Exception`.
        except:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr
-
-
-class QuotedString(Token):
- """Token for matching strings that are delimited by quoting characters.
- """
- def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None):
- """
- Defined with the following parameters:
- - quoteChar - string of one or more characters defining the quote delimiting string
- - escChar - character to escape quotes, typically backslash (default=None)
- - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None)
- - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
- - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
- - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
- """
- super(QuotedString,self).__init__()
-
- # remove white space from quote chars - wont work anyway
- quoteChar = quoteChar.strip()
- if len(quoteChar) == 0:
- warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
- raise SyntaxError()
-
- if endQuoteChar is None:
- endQuoteChar = quoteChar
- else:
- endQuoteChar = endQuoteChar.strip()
- if len(endQuoteChar) == 0:
- warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
- raise SyntaxError()
-
- self.quoteChar = quoteChar
- self.quoteCharLen = len(quoteChar)
- self.firstQuoteChar = quoteChar[0]
- self.endQuoteChar = endQuoteChar
- self.endQuoteCharLen = len(endQuoteChar)
- self.escChar = escChar
- self.escQuote = escQuote
- self.unquoteResults = unquoteResults
-
- if multiline:
- self.flags = re.MULTILINE | re.DOTALL
- self.pattern = r'%s(?:[^%s%s]' % \
- ( re.escape(self.quoteChar),
- _escapeRegexRangeChars(self.endQuoteChar[0]),
- (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
- else:
- self.flags = 0
- self.pattern = r'%s(?:[^%s\n\r%s]' % \
- ( re.escape(self.quoteChar),
- _escapeRegexRangeChars(self.endQuoteChar[0]),
- (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
- if len(self.endQuoteChar) > 1:
- self.pattern += (
- '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
- _escapeRegexRangeChars(self.endQuoteChar[i]))
- for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
- )
- if escQuote:
- self.pattern += (r'|(?:%s)' % re.escape(escQuote))
- if escChar:
- self.pattern += (r'|(?:%s.)' % re.escape(escChar))
- self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
- self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
-
- try:
- self.re = re.compile(self.pattern, self.flags)
- self.reString = self.pattern
- except sre_constants.error:
- warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
- SyntaxWarning, stacklevel=2)
- raise
-
- self.name = _ustr(self)
- self.errmsg = "Expected " + self.name
- self.mayIndexError = False
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
- if not result:
- raise ParseException(instring, loc, self.errmsg, self)
-
- loc = result.end()
- ret = result.group()
-
- if self.unquoteResults:
-
- # strip off quotes
- ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
-
- if isinstance(ret,basestring):
- # replace escaped characters
- if self.escChar:
- ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
-
- # replace escaped quotes
- if self.escQuote:
- ret = ret.replace(self.escQuote, self.endQuoteChar)
-
- return loc, ret
-
- def __str__( self ):
- try:
- return super(QuotedString,self).__str__()
- except:
- pass
-
- if self.strRepr is None:
- self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
-
- return self.strRepr
-
-
-class CharsNotIn(Token):
- """Token for matching words composed of characters *not* in a given set.
- Defined with string containing all disallowed characters, and an optional
- minimum, maximum, and/or exact length. The default value for C{min} is 1 (a
- minimum value < 1 is not valid); the default values for C{max} and C{exact}
- are 0, meaning no maximum or exact length restriction.
- """
- def __init__( self, notChars, min=1, max=0, exact=0 ):
- super(CharsNotIn,self).__init__()
- self.skipWhitespace = False
- self.notChars = notChars
-
- if min < 1:
- raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
-
- self.minLen = min
-
- if max > 0:
- self.maxLen = max
- else:
- self.maxLen = _MAX_INT
-
- if exact > 0:
- self.maxLen = exact
- self.minLen = exact
-
- self.name = _ustr(self)
- self.errmsg = "Expected " + self.name
- self.mayReturnEmpty = ( self.minLen == 0 )
- self.mayIndexError = False
-
- def parseImpl( self, instring, loc, doActions=True ):
- if instring[loc] in self.notChars:
- raise ParseException(instring, loc, self.errmsg, self)
-
- start = loc
- loc += 1
- notchars = self.notChars
- maxlen = min( start+self.maxLen, len(instring) )
- while loc < maxlen and \
- (instring[loc] not in notchars):
- loc += 1
-
- if loc - start < self.minLen:
- raise ParseException(instring, loc, self.errmsg, self)
-
- return loc, instring[start:loc]
-
- def __str__( self ):
- try:
- return super(CharsNotIn, self).__str__()
- except:
- pass
-
- if self.strRepr is None:
- if len(self.notChars) > 4:
- self.strRepr = "!W:(%s...)" % self.notChars[:4]
- else:
- self.strRepr = "!W:(%s)" % self.notChars
-
- return self.strRepr
-
-class White(Token):
- """Special matching class for matching whitespace. Normally, whitespace is ignored
- by pyparsing grammars. This class is included when some whitespace structures
- are significant. Define with a string containing the whitespace characters to be
- matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments,
- as defined for the C{L{Word}} class."""
- whiteStrs = {
- " " : "<SPC>",
- "\t": "<TAB>",
- "\n": "<LF>",
- "\r": "<CR>",
- "\f": "<FF>",
- }
- def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
- super(White,self).__init__()
- self.matchWhite = ws
- self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
- #~ self.leaveWhitespace()
- self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
- self.mayReturnEmpty = True
- self.errmsg = "Expected " + self.name
-
- self.minLen = min
-
- if max > 0:
- self.maxLen = max
- else:
- self.maxLen = _MAX_INT
-
- if exact > 0:
- self.maxLen = exact
- self.minLen = exact
-
- def parseImpl( self, instring, loc, doActions=True ):
- if not(instring[ loc ] in self.matchWhite):
- raise ParseException(instring, loc, self.errmsg, self)
- start = loc
- loc += 1
- maxloc = start + self.maxLen
- maxloc = min( maxloc, len(instring) )
- while loc < maxloc and instring[loc] in self.matchWhite:
- loc += 1
-
- if loc - start < self.minLen:
- raise ParseException(instring, loc, self.errmsg, self)
-
- return loc, instring[start:loc]
-
-
-class _PositionToken(Token):
- def __init__( self ):
- super(_PositionToken,self).__init__()
- self.name=self.__class__.__name__
- self.mayReturnEmpty = True
- self.mayIndexError = False
-
-class GoToColumn(_PositionToken):
- """Token to advance to a specific column of input text; useful for tabular report scraping."""
- def __init__( self, colno ):
- super(GoToColumn,self).__init__()
- self.col = colno
-
- def preParse( self, instring, loc ):
- if col(loc,instring) != self.col:
- instrlen = len(instring)
- if self.ignoreExprs:
- loc = self._skipIgnorables( instring, loc )
- while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
- loc += 1
- return loc
-
- def parseImpl( self, instring, loc, doActions=True ):
- thiscol = col( loc, instring )
- if thiscol > self.col:
- raise ParseException( instring, loc, "Text not in expected column", self )
- newloc = loc + self.col - thiscol
- ret = instring[ loc: newloc ]
- return newloc, ret
-
-class LineStart(_PositionToken):
- """Matches if current position is at the beginning of a line within the parse string"""
- def __init__( self ):
- super(LineStart,self).__init__()
- self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
- self.errmsg = "Expected start of line"
-
- def preParse( self, instring, loc ):
- preloc = super(LineStart,self).preParse(instring,loc)
- if instring[preloc] == "\n":
- loc += 1
- return loc
-
- def parseImpl( self, instring, loc, doActions=True ):
- if not( loc==0 or
- (loc == self.preParse( instring, 0 )) or
- (instring[loc-1] == "\n") ): #col(loc, instring) != 1:
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
-
-class LineEnd(_PositionToken):
- """Matches if current position is at the end of a line within the parse string"""
- def __init__( self ):
- super(LineEnd,self).__init__()
- self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
- self.errmsg = "Expected end of line"
-
- def parseImpl( self, instring, loc, doActions=True ):
- if loc<len(instring):
- if instring[loc] == "\n":
- return loc+1, "\n"
- else:
- raise ParseException(instring, loc, self.errmsg, self)
- elif loc == len(instring):
- return loc+1, []
- else:
- raise ParseException(instring, loc, self.errmsg, self)
-
-class StringStart(_PositionToken):
- """Matches if current position is at the beginning of the parse string"""
- def __init__( self ):
- super(StringStart,self).__init__()
- self.errmsg = "Expected start of text"
-
- def parseImpl( self, instring, loc, doActions=True ):
- if loc != 0:
- # see if entire string up to here is just whitespace and ignoreables
- if loc != self.preParse( instring, 0 ):
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
-
-class StringEnd(_PositionToken):
- """Matches if current position is at the end of the parse string"""
- def __init__( self ):
- super(StringEnd,self).__init__()
- self.errmsg = "Expected end of text"
-
- def parseImpl( self, instring, loc, doActions=True ):
- if loc < len(instring):
- raise ParseException(instring, loc, self.errmsg, self)
- elif loc == len(instring):
- return loc+1, []
- elif loc > len(instring):
- return loc, []
- else:
- raise ParseException(instring, loc, self.errmsg, self)
-
-class WordStart(_PositionToken):
- """Matches if the current position is at the beginning of a Word, and
- is not preceded by any character in a given set of C{wordChars}
- (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
- use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
- the string being parsed, or at the beginning of a line.
- """
- def __init__(self, wordChars = printables):
- super(WordStart,self).__init__()
- self.wordChars = set(wordChars)
- self.errmsg = "Not at the start of a word"
-
- def parseImpl(self, instring, loc, doActions=True ):
- if loc != 0:
- if (instring[loc-1] in self.wordChars or
- instring[loc] not in self.wordChars):
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
-
-class WordEnd(_PositionToken):
- """Matches if the current position is at the end of a Word, and
- is not followed by any character in a given set of C{wordChars}
- (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
- use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
- the string being parsed, or at the end of a line.
- """
- def __init__(self, wordChars = printables):
- super(WordEnd,self).__init__()
- self.wordChars = set(wordChars)
- self.skipWhitespace = False
- self.errmsg = "Not at the end of a word"
-
- def parseImpl(self, instring, loc, doActions=True ):
- instrlen = len(instring)
- if instrlen>0 and loc<instrlen:
- if (instring[loc] in self.wordChars or
- instring[loc-1] not in self.wordChars):
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
-
-
-class ParseExpression(ParserElement):
- """Abstract subclass of ParserElement, for combining and post-processing parsed tokens."""
- def __init__( self, exprs, savelist = False ):
- super(ParseExpression,self).__init__(savelist)
- if isinstance( exprs, _generatorType ):
- exprs = list(exprs)
-
- if isinstance( exprs, basestring ):
- self.exprs = [ Literal( exprs ) ]
- elif isinstance( exprs, collections.Sequence ):
- # if sequence of strings provided, wrap with Literal
- if all(isinstance(expr, basestring) for expr in exprs):
- exprs = map(Literal, exprs)
- self.exprs = list(exprs)
- else:
- try:
- self.exprs = list( exprs )
- except TypeError:
- self.exprs = [ exprs ]
- self.callPreparse = False
-
- def __getitem__( self, i ):
- return self.exprs[i]
-
- def append( self, other ):
- self.exprs.append( other )
- self.strRepr = None
- return self
-
- def leaveWhitespace( self ):
- """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
- all contained expressions."""
- self.skipWhitespace = False
- self.exprs = [ e.copy() for e in self.exprs ]
- for e in self.exprs:
- e.leaveWhitespace()
- return self
-
- def ignore( self, other ):
- if isinstance( other, Suppress ):
- if other not in self.ignoreExprs:
- super( ParseExpression, self).ignore( other )
- for e in self.exprs:
- e.ignore( self.ignoreExprs[-1] )
- else:
- super( ParseExpression, self).ignore( other )
- for e in self.exprs:
- e.ignore( self.ignoreExprs[-1] )
- return self
-
- def __str__( self ):
- try:
- return super(ParseExpression,self).__str__()
- except:
- pass
-
- if self.strRepr is None:
- self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
- return self.strRepr
-
- def streamline( self ):
- super(ParseExpression,self).streamline()
-
- for e in self.exprs:
- e.streamline()
-
- # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
- # but only if there are no parse actions or resultsNames on the nested And's
- # (likewise for Or's and MatchFirst's)
- if ( len(self.exprs) == 2 ):
- other = self.exprs[0]
- if ( isinstance( other, self.__class__ ) and
- not(other.parseAction) and
- other.resultsName is None and
- not other.debug ):
- self.exprs = other.exprs[:] + [ self.exprs[1] ]
- self.strRepr = None
- self.mayReturnEmpty |= other.mayReturnEmpty
- self.mayIndexError |= other.mayIndexError
-
- other = self.exprs[-1]
- if ( isinstance( other, self.__class__ ) and
- not(other.parseAction) and
- other.resultsName is None and
- not other.debug ):
- self.exprs = self.exprs[:-1] + other.exprs[:]
- self.strRepr = None
- self.mayReturnEmpty |= other.mayReturnEmpty
- self.mayIndexError |= other.mayIndexError
-
- self.errmsg = "Expected " + str(self)
-
- return self
-
- def setResultsName( self, name, listAllMatches=False ):
- ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
- return ret
-
- def validate( self, validateTrace=[] ):
- tmp = validateTrace[:]+[self]
- for e in self.exprs:
- e.validate(tmp)
- self.checkRecursion( [] )
-
- def copy(self):
- ret = super(ParseExpression,self).copy()
- ret.exprs = [e.copy() for e in self.exprs]
- return ret
-
-class And(ParseExpression):
- """Requires all given C{ParseExpression}s to be found in the given order.
- Expressions may be separated by whitespace.
- May be constructed using the C{'+'} operator.
- """
-
- class _ErrorStop(Empty):
- def __init__(self, *args, **kwargs):
- super(And._ErrorStop,self).__init__(*args, **kwargs)
- self.name = '-'
- self.leaveWhitespace()
-
- def __init__( self, exprs, savelist = True ):
- super(And,self).__init__(exprs, savelist)
- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
- self.setWhitespaceChars( self.exprs[0].whiteChars )
- self.skipWhitespace = self.exprs[0].skipWhitespace
- self.callPreparse = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- # pass False as last arg to _parse for first element, since we already
- # pre-parsed the string as part of our And pre-parsing
- loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
- errorStop = False
- for e in self.exprs[1:]:
- if isinstance(e, And._ErrorStop):
- errorStop = True
- continue
- if errorStop:
- try:
- loc, exprtokens = e._parse( instring, loc, doActions )
- except ParseSyntaxException:
- raise
- except ParseBaseException as pe:
- pe.__traceback__ = None
- raise ParseSyntaxException(pe)
- except IndexError:
- raise ParseSyntaxException( ParseException(instring, len(instring), self.errmsg, self) )
- else:
- loc, exprtokens = e._parse( instring, loc, doActions )
- if exprtokens or exprtokens.haskeys():
- resultlist += exprtokens
- return loc, resultlist
-
- def __iadd__(self, other ):
- if isinstance( other, basestring ):
- other = Literal( other )
- return self.append( other ) #And( [ self, other ] )
-
- def checkRecursion( self, parseElementList ):
- subRecCheckList = parseElementList[:] + [ self ]
- for e in self.exprs:
- e.checkRecursion( subRecCheckList )
- if not e.mayReturnEmpty:
- break
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
-
- return self.strRepr
-
-
-class Or(ParseExpression):
- """Requires that at least one C{ParseExpression} is found.
- If two expressions match, the expression that matches the longest string will be used.
- May be constructed using the C{'^'} operator.
- """
- def __init__( self, exprs, savelist = False ):
- super(Or,self).__init__(exprs, savelist)
- if self.exprs:
- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
- else:
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- maxExcLoc = -1
- maxException = None
- matches = []
- for e in self.exprs:
- try:
- loc2 = e.tryParse( instring, loc )
- except ParseException as err:
- err.__traceback__ = None
- if err.loc > maxExcLoc:
- maxException = err
- maxExcLoc = err.loc
- except IndexError:
- if len(instring) > maxExcLoc:
- maxException = ParseException(instring,len(instring),e.errmsg,self)
- maxExcLoc = len(instring)
- else:
- # save match among all matches, to retry longest to shortest
- matches.append((loc2, e))
-
- if matches:
- matches.sort(key=lambda x: -x[0])
- for _,e in matches:
- try:
- return e._parse( instring, loc, doActions )
- except ParseException as err:
- err.__traceback__ = None
- if err.loc > maxExcLoc:
- maxException = err
- maxExcLoc = err.loc
-
- if maxException is not None:
- maxException.msg = self.errmsg
- raise maxException
- else:
- raise ParseException(instring, loc, "no defined alternatives to match", self)
-
-
- def __ixor__(self, other ):
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- return self.append( other ) #Or( [ self, other ] )
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
-
- return self.strRepr
-
- def checkRecursion( self, parseElementList ):
- subRecCheckList = parseElementList[:] + [ self ]
- for e in self.exprs:
- e.checkRecursion( subRecCheckList )
-
-
-class MatchFirst(ParseExpression):
- """Requires that at least one C{ParseExpression} is found.
- If two expressions match, the first one listed is the one that will match.
- May be constructed using the C{'|'} operator.
- """
- def __init__( self, exprs, savelist = False ):
- super(MatchFirst,self).__init__(exprs, savelist)
- if self.exprs:
- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
- else:
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- maxExcLoc = -1
- maxException = None
- for e in self.exprs:
- try:
- ret = e._parse( instring, loc, doActions )
- return ret
- except ParseException as err:
- if err.loc > maxExcLoc:
- maxException = err
- maxExcLoc = err.loc
- except IndexError:
- if len(instring) > maxExcLoc:
- maxException = ParseException(instring,len(instring),e.errmsg,self)
- maxExcLoc = len(instring)
-
- # only got here if no expression matched, raise exception for match that made it the furthest
- else:
- if maxException is not None:
- maxException.msg = self.errmsg
- raise maxException
- else:
- raise ParseException(instring, loc, "no defined alternatives to match", self)
-
- def __ior__(self, other ):
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass( other )
- return self.append( other ) #MatchFirst( [ self, other ] )
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
-
- return self.strRepr
-
- def checkRecursion( self, parseElementList ):
- subRecCheckList = parseElementList[:] + [ self ]
- for e in self.exprs:
- e.checkRecursion( subRecCheckList )
-
-
-class Each(ParseExpression):
- """Requires all given C{ParseExpression}s to be found, but in any order.
- Expressions may be separated by whitespace.
- May be constructed using the C{'&'} operator.
- """
- def __init__( self, exprs, savelist = True ):
- super(Each,self).__init__(exprs, savelist)
- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
- self.skipWhitespace = True
- self.initExprGroups = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- if self.initExprGroups:
- self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
- opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
- opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
- self.optionals = opt1 + opt2
- self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
- self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
- self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
- self.required += self.multirequired
- self.initExprGroups = False
- tmpLoc = loc
- tmpReqd = self.required[:]
- tmpOpt = self.optionals[:]
- matchOrder = []
-
- keepMatching = True
- while keepMatching:
- tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
- failed = []
- for e in tmpExprs:
- try:
- tmpLoc = e.tryParse( instring, tmpLoc )
- except ParseException:
- failed.append(e)
- else:
- matchOrder.append(self.opt1map.get(id(e),e))
- if e in tmpReqd:
- tmpReqd.remove(e)
- elif e in tmpOpt:
- tmpOpt.remove(e)
- if len(failed) == len(tmpExprs):
- keepMatching = False
-
- if tmpReqd:
- missing = ", ".join(_ustr(e) for e in tmpReqd)
- raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
-
- # add any unmatched Optionals, in case they have default values defined
- matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
-
- resultlist = []
- for e in matchOrder:
- loc,results = e._parse(instring,loc,doActions)
- resultlist.append(results)
-
- finalResults = ParseResults([])
- for r in resultlist:
- dups = {}
- for k in r.keys():
- if k in finalResults:
- tmp = ParseResults(finalResults[k])
- tmp += ParseResults(r[k])
- dups[k] = tmp
- finalResults += ParseResults(r)
- for k,v in dups.items():
- finalResults[k] = v
- return loc, finalResults
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
-
- return self.strRepr
-
- def checkRecursion( self, parseElementList ):
- subRecCheckList = parseElementList[:] + [ self ]
- for e in self.exprs:
- e.checkRecursion( subRecCheckList )
-
-
-class ParseElementEnhance(ParserElement):
- """Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens."""
- def __init__( self, expr, savelist=False ):
- super(ParseElementEnhance,self).__init__(savelist)
- if isinstance( expr, basestring ):
- expr = Literal(expr)
- self.expr = expr
- self.strRepr = None
- if expr is not None:
- self.mayIndexError = expr.mayIndexError
- self.mayReturnEmpty = expr.mayReturnEmpty
- self.setWhitespaceChars( expr.whiteChars )
- self.skipWhitespace = expr.skipWhitespace
- self.saveAsList = expr.saveAsList
- self.callPreparse = expr.callPreparse
- self.ignoreExprs.extend(expr.ignoreExprs)
-
- def parseImpl( self, instring, loc, doActions=True ):
- if self.expr is not None:
- return self.expr._parse( instring, loc, doActions, callPreParse=False )
- else:
- raise ParseException("",loc,self.errmsg,self)
-
- def leaveWhitespace( self ):
- self.skipWhitespace = False
- self.expr = self.expr.copy()
- if self.expr is not None:
- self.expr.leaveWhitespace()
- return self
-
- def ignore( self, other ):
- if isinstance( other, Suppress ):
- if other not in self.ignoreExprs:
- super( ParseElementEnhance, self).ignore( other )
- if self.expr is not None:
- self.expr.ignore( self.ignoreExprs[-1] )
- else:
- super( ParseElementEnhance, self).ignore( other )
- if self.expr is not None:
- self.expr.ignore( self.ignoreExprs[-1] )
- return self
-
- def streamline( self ):
- super(ParseElementEnhance,self).streamline()
- if self.expr is not None:
- self.expr.streamline()
- return self
-
- def checkRecursion( self, parseElementList ):
- if self in parseElementList:
- raise RecursiveGrammarException( parseElementList+[self] )
- subRecCheckList = parseElementList[:] + [ self ]
- if self.expr is not None:
- self.expr.checkRecursion( subRecCheckList )
-
- def validate( self, validateTrace=[] ):
- tmp = validateTrace[:]+[self]
- if self.expr is not None:
- self.expr.validate(tmp)
- self.checkRecursion( [] )
-
- def __str__( self ):
- try:
- return super(ParseElementEnhance,self).__str__()
- except:
- pass
-
- if self.strRepr is None and self.expr is not None:
- self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
- return self.strRepr
-
-
-class FollowedBy(ParseElementEnhance):
- """Lookahead matching of the given parse expression. C{FollowedBy}
- does *not* advance the parsing position within the input string, it only
- verifies that the specified parse expression matches at the current
- position. C{FollowedBy} always returns a null token list."""
- def __init__( self, expr ):
- super(FollowedBy,self).__init__(expr)
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- self.expr.tryParse( instring, loc )
- return loc, []
-
-
-class NotAny(ParseElementEnhance):
- """Lookahead to disallow matching with the given parse expression. C{NotAny}
- does *not* advance the parsing position within the input string, it only
- verifies that the specified parse expression does *not* match at the current
- position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny}
- always returns a null token list. May be constructed using the '~' operator."""
- def __init__( self, expr ):
- super(NotAny,self).__init__(expr)
- #~ self.leaveWhitespace()
- self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
- self.mayReturnEmpty = True
- self.errmsg = "Found unwanted token, "+_ustr(self.expr)
-
- def parseImpl( self, instring, loc, doActions=True ):
- try:
- self.expr.tryParse( instring, loc )
- except (ParseException,IndexError):
- pass
- else:
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "~{" + _ustr(self.expr) + "}"
-
- return self.strRepr
-
-
-class ZeroOrMore(ParseElementEnhance):
- """Optional repetition of zero or more of the given expression."""
- def __init__( self, expr ):
- super(ZeroOrMore,self).__init__(expr)
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- tokens = []
- try:
- loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
- hasIgnoreExprs = ( len(self.ignoreExprs) > 0 )
- while 1:
- if hasIgnoreExprs:
- preloc = self._skipIgnorables( instring, loc )
- else:
- preloc = loc
- loc, tmptokens = self.expr._parse( instring, preloc, doActions )
- if tmptokens or tmptokens.haskeys():
- tokens += tmptokens
- except (ParseException,IndexError):
- pass
-
- return loc, tokens
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "[" + _ustr(self.expr) + "]..."
-
- return self.strRepr
-
- def setResultsName( self, name, listAllMatches=False ):
- ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches)
- ret.saveAsList = True
- return ret
-
-
-class OneOrMore(ParseElementEnhance):
- """Repetition of one or more of the given expression."""
- def parseImpl( self, instring, loc, doActions=True ):
- # must be at least one
- loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
- try:
- hasIgnoreExprs = ( len(self.ignoreExprs) > 0 )
- while 1:
- if hasIgnoreExprs:
- preloc = self._skipIgnorables( instring, loc )
- else:
- preloc = loc
- loc, tmptokens = self.expr._parse( instring, preloc, doActions )
- if tmptokens or tmptokens.haskeys():
- tokens += tmptokens
- except (ParseException,IndexError):
- pass
-
- return loc, tokens
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "{" + _ustr(self.expr) + "}..."
-
- return self.strRepr
-
- def setResultsName( self, name, listAllMatches=False ):
- ret = super(OneOrMore,self).setResultsName(name,listAllMatches)
- ret.saveAsList = True
- return ret
-
-class _NullToken(object):
- def __bool__(self):
- return False
- __nonzero__ = __bool__
- def __str__(self):
- return ""
-
-_optionalNotMatched = _NullToken()
-class Optional(ParseElementEnhance):
- """Optional matching of the given expression.
- A default return string can also be specified, if the optional expression
- is not found.
- """
- def __init__( self, expr, default=_optionalNotMatched ):
- super(Optional,self).__init__( expr, savelist=False )
- self.defaultValue = default
- self.mayReturnEmpty = True
-
- def parseImpl( self, instring, loc, doActions=True ):
- try:
- loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
- except (ParseException,IndexError):
- if self.defaultValue is not _optionalNotMatched:
- if self.expr.resultsName:
- tokens = ParseResults([ self.defaultValue ])
- tokens[self.expr.resultsName] = self.defaultValue
- else:
- tokens = [ self.defaultValue ]
- else:
- tokens = []
- return loc, tokens
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- if self.strRepr is None:
- self.strRepr = "[" + _ustr(self.expr) + "]"
-
- return self.strRepr
-
-
-class SkipTo(ParseElementEnhance):
- """Token for skipping over all undefined text until the matched expression is found.
- If C{include} is set to true, the matched expression is also parsed (the skipped text
- and matched expression are returned as a 2-element list). The C{ignore}
- argument is used to define grammars (typically quoted strings and comments) that
- might contain false matches.
- """
- def __init__( self, other, include=False, ignore=None, failOn=None ):
- super( SkipTo, self ).__init__( other )
- self.ignoreExpr = ignore
- self.mayReturnEmpty = True
- self.mayIndexError = False
- self.includeMatch = include
- self.asList = False
- if failOn is not None and isinstance(failOn, basestring):
- self.failOn = Literal(failOn)
- else:
- self.failOn = failOn
- self.errmsg = "No match found for "+_ustr(self.expr)
-
- def parseImpl( self, instring, loc, doActions=True ):
- startLoc = loc
- instrlen = len(instring)
- expr = self.expr
- failParse = False
- while loc <= instrlen:
- try:
- if self.failOn:
- try:
- self.failOn.tryParse(instring, loc)
- except ParseBaseException:
- pass
- else:
- failParse = True
- raise ParseException(instring, loc, "Found expression " + str(self.failOn))
- failParse = False
- if self.ignoreExpr is not None:
- while 1:
- try:
- loc = self.ignoreExpr.tryParse(instring,loc)
- # print("found ignoreExpr, advance to", loc)
- except ParseBaseException:
- break
- expr._parse( instring, loc, doActions=False, callPreParse=False )
- skipText = instring[startLoc:loc]
- if self.includeMatch:
- loc,mat = expr._parse(instring,loc,doActions,callPreParse=False)
- if mat:
- skipRes = ParseResults( skipText )
- skipRes += mat
- return loc, [ skipRes ]
- else:
- return loc, [ skipText ]
- else:
- return loc, [ skipText ]
- except (ParseException,IndexError):
- if failParse:
- raise
- else:
- loc += 1
- raise ParseException(instring, loc, self.errmsg, self)
-
-class Forward(ParseElementEnhance):
- """Forward declaration of an expression to be defined later -
- used for recursive grammars, such as algebraic infix notation.
- When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
-
- Note: take care when assigning to C{Forward} not to overlook precedence of operators.
- Specifically, '|' has a lower precedence than '<<', so that::
- fwdExpr << a | b | c
- will actually be evaluated as::
- (fwdExpr << a) | b | c
- thereby leaving b and c out as parseable alternatives. It is recommended that you
- explicitly group the values inserted into the C{Forward}::
- fwdExpr << (a | b | c)
- Converting to use the '<<=' operator instead will avoid this problem.
- """
- def __init__( self, other=None ):
- super(Forward,self).__init__( other, savelist=False )
-
- def __lshift__( self, other ):
- if isinstance( other, basestring ):
- other = ParserElement.literalStringClass(other)
- self.expr = other
- self.mayReturnEmpty = other.mayReturnEmpty
- self.strRepr = None
- self.mayIndexError = self.expr.mayIndexError
- self.mayReturnEmpty = self.expr.mayReturnEmpty
- self.setWhitespaceChars( self.expr.whiteChars )
- self.skipWhitespace = self.expr.skipWhitespace
- self.saveAsList = self.expr.saveAsList
- self.ignoreExprs.extend(self.expr.ignoreExprs)
- return self
-
- def __ilshift__(self, other):
- return self << other
-
- def leaveWhitespace( self ):
- self.skipWhitespace = False
- return self
-
- def streamline( self ):
- if not self.streamlined:
- self.streamlined = True
- if self.expr is not None:
- self.expr.streamline()
- return self
-
- def validate( self, validateTrace=[] ):
- if self not in validateTrace:
- tmp = validateTrace[:]+[self]
- if self.expr is not None:
- self.expr.validate(tmp)
- self.checkRecursion([])
-
- def __str__( self ):
- if hasattr(self,"name"):
- return self.name
-
- self._revertClass = self.__class__
- self.__class__ = _ForwardNoRecurse
- try:
- if self.expr is not None:
- retString = _ustr(self.expr)
- else:
- retString = "None"
- finally:
- self.__class__ = self._revertClass
- return self.__class__.__name__ + ": " + retString
-
- def copy(self):
- if self.expr is not None:
- return super(Forward,self).copy()
- else:
- ret = Forward()
- ret <<= self
- return ret
-
-class _ForwardNoRecurse(Forward):
- def __str__( self ):
- return "..."
-
-class TokenConverter(ParseElementEnhance):
- """Abstract subclass of C{ParseExpression}, for converting parsed results."""
- def __init__( self, expr, savelist=False ):
- super(TokenConverter,self).__init__( expr )#, savelist )
- self.saveAsList = False
-
-class Upcase(TokenConverter):
- """Converter to upper case all matching tokens."""
- def __init__(self, *args):
- super(Upcase,self).__init__(*args)
- warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead",
- DeprecationWarning,stacklevel=2)
-
- def postParse( self, instring, loc, tokenlist ):
- return list(map( str.upper, tokenlist ))
-
-
-class Combine(TokenConverter):
- """Converter to concatenate all matching tokens to a single string.
- By default, the matching patterns must also be contiguous in the input string;
- this can be disabled by specifying C{'adjacent=False'} in the constructor.
- """
- def __init__( self, expr, joinString="", adjacent=True ):
- super(Combine,self).__init__( expr )
- # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
- if adjacent:
- self.leaveWhitespace()
- self.adjacent = adjacent
- self.skipWhitespace = True
- self.joinString = joinString
- self.callPreparse = True
-
- def ignore( self, other ):
- if self.adjacent:
- ParserElement.ignore(self, other)
- else:
- super( Combine, self).ignore( other )
- return self
-
- def postParse( self, instring, loc, tokenlist ):
- retToks = tokenlist.copy()
- del retToks[:]
- retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
-
- if self.resultsName and retToks.haskeys():
- return [ retToks ]
- else:
- return retToks
-
-class Group(TokenConverter):
- """Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions."""
- def __init__( self, expr ):
- super(Group,self).__init__( expr )
- self.saveAsList = True
-
- def postParse( self, instring, loc, tokenlist ):
- return [ tokenlist ]
-
-class Dict(TokenConverter):
- """Converter to return a repetitive expression as a list, but also as a dictionary.
- Each element can also be referenced using the first token in the expression as its key.
- Useful for tabular report scraping when the first column can be used as a item key.
- """
- def __init__( self, expr ):
- super(Dict,self).__init__( expr )
- self.saveAsList = True
-
- def postParse( self, instring, loc, tokenlist ):
- for i,tok in enumerate(tokenlist):
- if len(tok) == 0:
- continue
- ikey = tok[0]
- if isinstance(ikey,int):
- ikey = _ustr(tok[0]).strip()
- if len(tok)==1:
- tokenlist[ikey] = _ParseResultsWithOffset("",i)
- elif len(tok)==2 and not isinstance(tok[1],ParseResults):
- tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
- else:
- dictvalue = tok.copy() #ParseResults(i)
- del dictvalue[0]
- if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
- tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
- else:
- tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
-
- if self.resultsName:
- return [ tokenlist ]
- else:
- return tokenlist
-
-
-class Suppress(TokenConverter):
- """Converter for ignoring the results of a parsed expression."""
- def postParse( self, instring, loc, tokenlist ):
- return []
-
- def suppress( self ):
- return self
-
-
-class OnlyOnce(object):
- """Wrapper for parse actions, to ensure they are only called once."""
- def __init__(self, methodCall):
- self.callable = _trim_arity(methodCall)
- self.called = False
- def __call__(self,s,l,t):
- if not self.called:
- results = self.callable(s,l,t)
- self.called = True
- return results
- raise ParseException(s,l,"")
- def reset(self):
- self.called = False
-
-def traceParseAction(f):
- """Decorator for debugging parse actions."""
- f = _trim_arity(f)
- def z(*paArgs):
- thisFunc = f.func_name
- s,l,t = paArgs[-3:]
- if len(paArgs)>3:
- thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
- sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) )
- try:
- ret = f(*paArgs)
- except Exception as exc:
- sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
- raise
- sys.stderr.write( "<<leaving %s (ret: %s)\n" % (thisFunc,ret) )
- return ret
- try:
- z.__name__ = f.__name__
- except AttributeError:
- pass
- return z
-
-#
-# global helpers
-#
-def delimitedList( expr, delim=",", combine=False ):
- """Helper to define a delimited list of expressions - the delimiter defaults to ','.
- By default, the list elements and delimiters can have intervening whitespace, and
- comments, but this can be overridden by passing C{combine=True} in the constructor.
- If C{combine} is set to C{True}, the matching tokens are returned as a single token
- string, with the delimiters included; otherwise, the matching tokens are returned
- as a list of tokens, with the delimiters suppressed.
- """
- dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
- if combine:
- return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
- else:
- return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
-
-def countedArray( expr, intExpr=None ):
- """Helper to define a counted list of expressions.
- This helper defines a pattern of the form::
- integer expr expr expr...
- where the leading integer tells how many expr expressions follow.
- The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
- """
- arrayExpr = Forward()
- def countFieldParseAction(s,l,t):
- n = t[0]
- arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
- return []
- if intExpr is None:
- intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
- else:
- intExpr = intExpr.copy()
- intExpr.setName("arrayLen")
- intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
- return ( intExpr + arrayExpr )
-
-def _flatten(L):
- ret = []
- for i in L:
- if isinstance(i,list):
- ret.extend(_flatten(i))
- else:
- ret.append(i)
- return ret
-
-def matchPreviousLiteral(expr):
- """Helper to define an expression that is indirectly defined from
- the tokens matched in a previous expression, that is, it looks
- for a 'repeat' of a previous expression. For example::
- first = Word(nums)
- second = matchPreviousLiteral(first)
- matchExpr = first + ":" + second
- will match C{"1:1"}, but not C{"1:2"}. Because this matches a
- previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
- If this is not desired, use C{matchPreviousExpr}.
- Do *not* use with packrat parsing enabled.
- """
- rep = Forward()
- def copyTokenToRepeater(s,l,t):
- if t:
- if len(t) == 1:
- rep << t[0]
- else:
- # flatten t tokens
- tflat = _flatten(t.asList())
- rep << And( [ Literal(tt) for tt in tflat ] )
- else:
- rep << Empty()
- expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
- return rep
-
-def matchPreviousExpr(expr):
- """Helper to define an expression that is indirectly defined from
- the tokens matched in a previous expression, that is, it looks
- for a 'repeat' of a previous expression. For example::
- first = Word(nums)
- second = matchPreviousExpr(first)
- matchExpr = first + ":" + second
- will match C{"1:1"}, but not C{"1:2"}. Because this matches by
- expressions, will *not* match the leading C{"1:1"} in C{"1:10"};
- the expressions are evaluated first, and then compared, so
- C{"1"} is compared with C{"10"}.
- Do *not* use with packrat parsing enabled.
- """
- rep = Forward()
- e2 = expr.copy()
- rep <<= e2
- def copyTokenToRepeater(s,l,t):
- matchTokens = _flatten(t.asList())
- def mustMatchTheseTokens(s,l,t):
- theseTokens = _flatten(t.asList())
- if theseTokens != matchTokens:
- raise ParseException("",0,"")
- rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
- expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
- return rep
-
-def _escapeRegexRangeChars(s):
- #~ escape these chars: ^-]
- for c in r"\^-]":
- s = s.replace(c,_bslash+c)
- s = s.replace("\n",r"\n")
- s = s.replace("\t",r"\t")
- return _ustr(s)
-
-def oneOf( strs, caseless=False, useRegex=True ):
- """Helper to quickly define a set of alternative Literals, and makes sure to do
- longest-first testing when there is a conflict, regardless of the input order,
- but returns a C{L{MatchFirst}} for best performance.
-
- Parameters:
- - strs - a string of space-delimited literals, or a list of string literals
- - caseless - (default=False) - treat all literals as caseless
- - useRegex - (default=True) - as an optimization, will generate a Regex
- object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
- if creating a C{Regex} raises an exception)
- """
- if caseless:
- isequal = ( lambda a,b: a.upper() == b.upper() )
- masks = ( lambda a,b: b.upper().startswith(a.upper()) )
- parseElementClass = CaselessLiteral
- else:
- isequal = ( lambda a,b: a == b )
- masks = ( lambda a,b: b.startswith(a) )
- parseElementClass = Literal
-
- symbols = []
- if isinstance(strs,basestring):
- symbols = strs.split()
- elif isinstance(strs, collections.Sequence):
- symbols = list(strs[:])
- elif isinstance(strs, _generatorType):
- symbols = list(strs)
- else:
- warnings.warn("Invalid argument to oneOf, expected string or list",
- SyntaxWarning, stacklevel=2)
- if not symbols:
- return NoMatch()
-
- i = 0
- while i < len(symbols)-1:
- cur = symbols[i]
- for j,other in enumerate(symbols[i+1:]):
- if ( isequal(other, cur) ):
- del symbols[i+j+1]
- break
- elif ( masks(cur, other) ):
- del symbols[i+j+1]
- symbols.insert(i,other)
- cur = other
- break
- else:
- i += 1
-
- if not caseless and useRegex:
- #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
- try:
- if len(symbols)==len("".join(symbols)):
- return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) )
- else:
- return Regex( "|".join(re.escape(sym) for sym in symbols) )
- except:
- warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
- SyntaxWarning, stacklevel=2)
-
-
- # last resort, just use MatchFirst
- return MatchFirst( [ parseElementClass(sym) for sym in symbols ] )
-
-def dictOf( key, value ):
- """Helper to easily and clearly define a dictionary by specifying the respective patterns
- for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
- in the proper order. The key pattern can include delimiting markers or punctuation,
- as long as they are suppressed, thereby leaving the significant key text. The value
- pattern can include named results, so that the C{Dict} results can include named token
- fields.
- """
- return Dict( ZeroOrMore( Group ( key + value ) ) )
-
-def originalTextFor(expr, asString=True):
- """Helper to return the original, untokenized text for a given expression. Useful to
- restore the parsed fields of an HTML start tag into the raw tag text itself, or to
- revert separate tokens with intervening whitespace back to the original matching
- input text. Simpler to use than the parse action C{L{keepOriginalText}}, and does not
- require the inspect module to chase up the call stack. By default, returns a
- string containing the original parsed text.
-
- If the optional C{asString} argument is passed as C{False}, then the return value is a
- C{L{ParseResults}} containing any results names that were originally matched, and a
- single token containing the original matched text from the input string. So if
- the expression passed to C{L{originalTextFor}} contains expressions with defined
- results names, you must set C{asString} to C{False} if you want to preserve those
- results name values."""
- locMarker = Empty().setParseAction(lambda s,loc,t: loc)
- endlocMarker = locMarker.copy()
- endlocMarker.callPreparse = False
- matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
- if asString:
- extractText = lambda s,l,t: s[t._original_start:t._original_end]
- else:
- def extractText(s,l,t):
- del t[:]
- t.insert(0, s[t._original_start:t._original_end])
- del t["_original_start"]
- del t["_original_end"]
- matchExpr.setParseAction(extractText)
- return matchExpr
-
-def ungroup(expr):
- """Helper to undo pyparsing's default grouping of And expressions, even
- if all but one are non-empty."""
- return TokenConverter(expr).setParseAction(lambda t:t[0])
-
-def locatedExpr(expr):
- """Helper to decorate a returned token with its starting and ending locations in the input string.
- This helper adds the following results names:
- - locn_start = location where matched expression begins
- - locn_end = location where matched expression ends
- - value = the actual parsed results
-
- Be careful if the input text contains C{<TAB>} characters, you may want to call
- C{L{ParserElement.parseWithTabs}}
- """
- locator = Empty().setParseAction(lambda s,l,t: l)
- return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
-
-
-# convenience constants for positional expressions
-empty = Empty().setName("empty")
-lineStart = LineStart().setName("lineStart")
-lineEnd = LineEnd().setName("lineEnd")
-stringStart = StringStart().setName("stringStart")
-stringEnd = StringEnd().setName("stringEnd")
-
-_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
-_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
-_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
-_charRange = Group(_singleChar + Suppress("-") + _singleChar)
-_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
-
-def srange(s):
- r"""Helper to easily define string ranges for use in Word construction. Borrows
- syntax from regexp '[]' string range definitions::
- srange("[0-9]") -> "0123456789"
- srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
- srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
- The input string must be enclosed in []'s, and the returned string is the expanded
- character set joined into a single string.
- The values enclosed in the []'s may be::
- a single character
- an escaped character with a leading backslash (such as \- or \])
- an escaped hex character with a leading '\x' (\x21, which is a '!' character)
- (\0x## is also supported for backwards compatibility)
- an escaped octal character with a leading '\0' (\041, which is a '!' character)
- a range of any of the above, separated by a dash ('a-z', etc.)
- any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
- """
- _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
- try:
- return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
- except:
- return ""
-
-def matchOnlyAtCol(n):
- """Helper method for defining parse actions that require matching at a specific
- column in the input text.
- """
- def verifyCol(strg,locn,toks):
- if col(locn,strg) != n:
- raise ParseException(strg,locn,"matched token not at column %d" % n)
- return verifyCol
-
-def replaceWith(replStr):
- """Helper method for common parse actions that simply return a literal value. Especially
- useful when used with C{L{transformString<ParserElement.transformString>}()}.
- """
- #def _replFunc(*args):
- # return [replStr]
- #return _replFunc
- return functools.partial(next, itertools.repeat([replStr]))
-
-def removeQuotes(s,l,t):
- """Helper parse action for removing quotation marks from parsed quoted strings.
- To use, add this parse action to quoted string using::
- quotedString.setParseAction( removeQuotes )
- """
- return t[0][1:-1]
-
-def upcaseTokens(s,l,t):
- """Helper parse action to convert tokens to upper case."""
- return [ tt.upper() for tt in map(_ustr,t) ]
-
-def downcaseTokens(s,l,t):
- """Helper parse action to convert tokens to lower case."""
- return [ tt.lower() for tt in map(_ustr,t) ]
-
-def keepOriginalText(s,startLoc,t):
- """DEPRECATED - use new helper method C{L{originalTextFor}}.
- Helper parse action to preserve original parsed text,
- overriding any nested parse actions."""
- try:
- endloc = getTokensEndLoc()
- except ParseException:
- raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action")
- del t[:]
- t += ParseResults(s[startLoc:endloc])
- return t
-
-def getTokensEndLoc():
- """Method to be called from within a parse action to determine the end
- location of the parsed tokens."""
- import inspect
- fstack = inspect.stack()
- try:
- # search up the stack (through intervening argument normalizers) for correct calling routine
- for f in fstack[2:]:
- if f[3] == "_parseNoCache":
- endloc = f[0].f_locals["loc"]
- return endloc
- else:
- raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action")
- finally:
- del fstack
-
-def _makeTags(tagStr, xml):
- """Internal helper to construct opening and closing tag expressions, given a tag name"""
- if isinstance(tagStr,basestring):
- resname = tagStr
- tagStr = Keyword(tagStr, caseless=not xml)
- else:
- resname = tagStr.name
-
- tagAttrName = Word(alphas,alphanums+"_-:")
- if (xml):
- tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
- openTag = Suppress("<") + tagStr("tag") + \
- Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
- Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
- else:
- printablesLessRAbrack = "".join(c for c in printables if c not in ">")
- tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
- openTag = Suppress("<") + tagStr("tag") + \
- Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
- Optional( Suppress("=") + tagAttrValue ) ))) + \
- Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
- closeTag = Combine(_L("</") + tagStr + ">")
-
- openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr)
- closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr)
- openTag.tag = resname
- closeTag.tag = resname
- return openTag, closeTag
-
-def makeHTMLTags(tagStr):
- """Helper to construct opening and closing tag expressions for HTML, given a tag name"""
- return _makeTags( tagStr, False )
-
-def makeXMLTags(tagStr):
- """Helper to construct opening and closing tag expressions for XML, given a tag name"""
- return _makeTags( tagStr, True )
-
-def withAttribute(*args,**attrDict):
- """Helper to create a validating parse action to be used with start tags created
- with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
- with a required attribute value, to avoid false matches on common tags such as
- C{<TD>} or C{<DIV>}.
-
- Call C{withAttribute} with a series of attribute names and values. Specify the list
- of filter attributes names and values as:
- - keyword arguments, as in C{(align="right")}, or
- - as an explicit dict with C{**} operator, when an attribute name is also a Python
- reserved word, as in C{**{"class":"Customer", "align":"right"}}
- - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
- For attribute names with a namespace prefix, you must use the second form. Attribute
- names are matched insensitive to upper/lower case.
-
- If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
-
- To verify that the attribute exists, but without specifying a value, pass
- C{withAttribute.ANY_VALUE} as the value.
- """
- if args:
- attrs = args[:]
- else:
- attrs = attrDict.items()
- attrs = [(k,v) for k,v in attrs]
- def pa(s,l,tokens):
- for attrName,attrValue in attrs:
- if attrName not in tokens:
- raise ParseException(s,l,"no matching attribute " + attrName)
- if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
- raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
- (attrName, tokens[attrName], attrValue))
- return pa
-withAttribute.ANY_VALUE = object()
-
-def withClass(classname, namespace=''):
- """Simplified version of C{L{withAttribute}} when matching on a div class - made
- difficult because C{class} is a reserved word in Python.
- """
- classattr = "%s:class" % namespace if namespace else "class"
- return withAttribute(**{classattr : classname})
-
-opAssoc = _Constants()
-opAssoc.LEFT = object()
-opAssoc.RIGHT = object()
-
-def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
- """Helper method for constructing grammars of expressions made up of
- operators working in a precedence hierarchy. Operators may be unary or
- binary, left- or right-associative. Parse actions can also be attached
- to operator expressions.
-
- Parameters:
- - baseExpr - expression representing the most basic element for the nested
- - opList - list of tuples, one for each operator precedence level in the
- expression grammar; each tuple is of the form
- (opExpr, numTerms, rightLeftAssoc, parseAction), where:
- - opExpr is the pyparsing expression for the operator;
- may also be a string, which will be converted to a Literal;
- if numTerms is 3, opExpr is a tuple of two expressions, for the
- two operators separating the 3 terms
- - numTerms is the number of terms for this operator (must
- be 1, 2, or 3)
- - rightLeftAssoc is the indicator whether the operator is
- right or left associative, using the pyparsing-defined
- constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- - parseAction is the parse action to be associated with
- expressions matching this operator expression (the
- parse action tuple member may be omitted)
- - lpar - expression for matching left-parentheses (default=Suppress('('))
- - rpar - expression for matching right-parentheses (default=Suppress(')'))
- """
- ret = Forward()
- lastExpr = baseExpr | ( lpar + ret + rpar )
- for i,operDef in enumerate(opList):
- opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
- if arity == 3:
- if opExpr is None or len(opExpr) != 2:
- raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
- opExpr1, opExpr2 = opExpr
- thisExpr = Forward()#.setName("expr%d" % i)
- if rightLeftAssoc == opAssoc.LEFT:
- if arity == 1:
- matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
- elif arity == 2:
- if opExpr is not None:
- matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
- else:
- matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
- elif arity == 3:
- matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
- Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
- else:
- raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
- elif rightLeftAssoc == opAssoc.RIGHT:
- if arity == 1:
- # try to avoid LR with this extra test
- if not isinstance(opExpr, Optional):
- opExpr = Optional(opExpr)
- matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
- elif arity == 2:
- if opExpr is not None:
- matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
- else:
- matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
- elif arity == 3:
- matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
- Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
- else:
- raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
- else:
- raise ValueError("operator must indicate right or left associativity")
- if pa:
- matchExpr.setParseAction( pa )
- thisExpr <<= ( matchExpr | lastExpr )
- lastExpr = thisExpr
- ret <<= lastExpr
- return ret
-operatorPrecedence = infixNotation
-
-dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes")
-sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes")
-quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes")
-unicodeString = Combine(_L('u') + quotedString.copy())
-
-def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
- """Helper method for defining nested lists enclosed in opening and closing
- delimiters ("(" and ")" are the default).
-
- Parameters:
- - opener - opening character for a nested list (default="("); can also be a pyparsing expression
- - closer - closing character for a nested list (default=")"); can also be a pyparsing expression
- - content - expression for items within the nested lists (default=None)
- - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString)
-
- If an expression is not provided for the content argument, the nested
- expression will capture all whitespace-delimited content between delimiters
- as a list of separate values.
-
- Use the C{ignoreExpr} argument to define expressions that may contain
- opening or closing characters that should not be treated as opening
- or closing characters for nesting, such as quotedString or a comment
- expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
- The default is L{quotedString}, but if no expressions are to be ignored,
- then pass C{None} for this argument.
- """
- if opener == closer:
- raise ValueError("opening and closing strings cannot be the same")
- if content is None:
- if isinstance(opener,basestring) and isinstance(closer,basestring):
- if len(opener) == 1 and len(closer)==1:
- if ignoreExpr is not None:
- content = (Combine(OneOrMore(~ignoreExpr +
- CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
- ).setParseAction(lambda t:t[0].strip()))
- else:
- content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
- ).setParseAction(lambda t:t[0].strip()))
- else:
- if ignoreExpr is not None:
- content = (Combine(OneOrMore(~ignoreExpr +
- ~Literal(opener) + ~Literal(closer) +
- CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
- ).setParseAction(lambda t:t[0].strip()))
- else:
- content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
- CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
- ).setParseAction(lambda t:t[0].strip()))
- else:
- raise ValueError("opening and closing arguments must be strings if no content expression is given")
- ret = Forward()
- if ignoreExpr is not None:
- ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
- else:
- ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
- return ret
-
-def indentedBlock(blockStatementExpr, indentStack, indent=True):
- """Helper method for defining space-delimited indentation blocks, such as
- those used to define block statements in Python source code.
-
- Parameters:
- - blockStatementExpr - expression defining syntax of statement that
- is repeated within the indented block
- - indentStack - list created by caller to manage indentation stack
- (multiple statementWithIndentedBlock expressions within a single grammar
- should share a common indentStack)
- - indent - boolean indicating whether block must be indented beyond the
- the current level; set to False for block of left-most statements
- (default=True)
-
- A valid block must contain at least one C{blockStatement}.
- """
- def checkPeerIndent(s,l,t):
- if l >= len(s): return
- curCol = col(l,s)
- if curCol != indentStack[-1]:
- if curCol > indentStack[-1]:
- raise ParseFatalException(s,l,"illegal nesting")
- raise ParseException(s,l,"not a peer entry")
-
- def checkSubIndent(s,l,t):
- curCol = col(l,s)
- if curCol > indentStack[-1]:
- indentStack.append( curCol )
- else:
- raise ParseException(s,l,"not a subentry")
-
- def checkUnindent(s,l,t):
- if l >= len(s): return
- curCol = col(l,s)
- if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
- raise ParseException(s,l,"not an unindent")
- indentStack.pop()
-
- NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
- INDENT = Empty() + Empty().setParseAction(checkSubIndent)
- PEER = Empty().setParseAction(checkPeerIndent)
- UNDENT = Empty().setParseAction(checkUnindent)
- if indent:
- smExpr = Group( Optional(NL) +
- #~ FollowedBy(blockStatementExpr) +
- INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
- else:
- smExpr = Group( Optional(NL) +
- (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
- blockStatementExpr.ignore(_bslash + LineEnd())
- return smExpr
-
-alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
-punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
-
-anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:"))
-commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";").streamline()
-_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),'><& "'))
-replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None
-
-# it's easy to get these comment structures wrong - they're very common, so may as well make them available
-cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment")
-
-htmlComment = Regex(r"<!--[\s\S]*?-->")
-restOfLine = Regex(r".*").leaveWhitespace()
-dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment")
-cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?<!\\)|\Z))").setName("C++ style comment")
-
-javaStyleComment = cppStyleComment
-pythonStyleComment = Regex(r"#.*").setName("Python style comment")
-_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
- Optional( Word(" \t") +
- ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
-commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
-
-
-if __name__ == "__main__":
-
- selectToken = CaselessLiteral( "select" )
- fromToken = CaselessLiteral( "from" )
-
- ident = Word( alphas, alphanums + "_$" )
- columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
- columnNameList = Group( delimitedList( columnName ) ).setName("columns")
- tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
- tableNameList = Group( delimitedList( tableName ) ).setName("tables")
- simpleSQL = ( selectToken + \
- ( '*' | columnNameList ).setResultsName( "columns" ) + \
- fromToken + \
- tableNameList.setResultsName( "tables" ) )
-
- simpleSQL.runTests("""\
- SELECT * from XYZZY, ABC
- select * from SYS.XYZZY
- Select A from Sys.dual
- Select AA,BB,CC from Sys.dual
- Select A, B, C from Sys.dual
- Select A, B, C from Sys.dual
- Xelect A, B, C from Sys.dual
- Select A, B, C frox Sys.dual
- Select
- Select ^^^ frox Sys.dual
- Select A, B, C from Sys.dual, Table2""")
-
diff --git a/pkg_resources/_vendor/six.py b/pkg_resources/_vendor/six.py
deleted file mode 100644
index 190c0239..00000000
--- a/pkg_resources/_vendor/six.py
+++ /dev/null
@@ -1,868 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
- string_types = str,
- integer_types = int,
- class_types = type,
- text_type = str
- binary_type = bytes
-
- MAXSIZE = sys.maxsize
-else:
- string_types = basestring,
- integer_types = (int, long)
- class_types = (type, types.ClassType)
- text_type = unicode
- binary_type = str
-
- if sys.platform.startswith("java"):
- # Jython always uses 32 bits.
- MAXSIZE = int((1 << 31) - 1)
- else:
- # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
- class X(object):
-
- def __len__(self):
- return 1 << 31
- try:
- len(X())
- except OverflowError:
- # 32-bit
- MAXSIZE = int((1 << 31) - 1)
- else:
- # 64-bit
- MAXSIZE = int((1 << 63) - 1)
- del X
-
-
-def _add_doc(func, doc):
- """Add documentation to a function."""
- func.__doc__ = doc
-
-
-def _import_module(name):
- """Import module, returning the module after the last dot."""
- __import__(name)
- return sys.modules[name]
-
-
-class _LazyDescr(object):
-
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, tp):
- result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
- return result
-
-
-class MovedModule(_LazyDescr):
-
- def __init__(self, name, old, new=None):
- super(MovedModule, self).__init__(name)
- if PY3:
- if new is None:
- new = name
- self.mod = new
- else:
- self.mod = old
-
- def _resolve(self):
- return _import_module(self.mod)
-
- def __getattr__(self, attr):
- _module = self._resolve()
- value = getattr(_module, attr)
- setattr(self, attr, value)
- return value
-
-
-class _LazyModule(types.ModuleType):
-
- def __init__(self, name):
- super(_LazyModule, self).__init__(name)
- self.__doc__ = self.__class__.__doc__
-
- def __dir__(self):
- attrs = ["__doc__", "__name__"]
- attrs += [attr.name for attr in self._moved_attributes]
- return attrs
-
- # Subclasses should override this
- _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
- def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
- super(MovedAttribute, self).__init__(name)
- if PY3:
- if new_mod is None:
- new_mod = name
- self.mod = new_mod
- if new_attr is None:
- if old_attr is None:
- new_attr = name
- else:
- new_attr = old_attr
- self.attr = new_attr
- else:
- self.mod = old_mod
- if old_attr is None:
- old_attr = name
- self.attr = old_attr
-
- def _resolve(self):
- module = _import_module(self.mod)
- return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
- """
- A meta path importer to import six.moves and its submodules.
-
- This class implements a PEP302 finder and loader. It should be compatible
- with Python 2.5 and all existing versions of Python3
- """
-
- def __init__(self, six_module_name):
- self.name = six_module_name
- self.known_modules = {}
-
- def _add_module(self, mod, *fullnames):
- for fullname in fullnames:
- self.known_modules[self.name + "." + fullname] = mod
-
- def _get_module(self, fullname):
- return self.known_modules[self.name + "." + fullname]
-
- def find_module(self, fullname, path=None):
- if fullname in self.known_modules:
- return self
- return None
-
- def __get_module(self, fullname):
- try:
- return self.known_modules[fullname]
- except KeyError:
- raise ImportError("This loader does not know module " + fullname)
-
- def load_module(self, fullname):
- try:
- # in case of a reload
- return sys.modules[fullname]
- except KeyError:
- pass
- mod = self.__get_module(fullname)
- if isinstance(mod, MovedModule):
- mod = mod._resolve()
- else:
- mod.__loader__ = self
- sys.modules[fullname] = mod
- return mod
-
- def is_package(self, fullname):
- """
- Return true, if the named module is a package.
-
- We need this method to get correct spec objects with
- Python 3.4 (see PEP451)
- """
- return hasattr(self.__get_module(fullname), "__path__")
-
- def get_code(self, fullname):
- """Return None
-
- Required, if is_package is implemented"""
- self.__get_module(fullname) # eventually raises ImportError
- return None
- get_source = get_code # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
- """Lazy loading of moved objects"""
- __path__ = [] # mark as package
-
-
-_moved_attributes = [
- MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
- MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
- MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
- MovedAttribute("intern", "__builtin__", "sys"),
- MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
- MovedAttribute("reduce", "__builtin__", "functools"),
- MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
- MovedAttribute("StringIO", "StringIO", "io"),
- MovedAttribute("UserDict", "UserDict", "collections"),
- MovedAttribute("UserList", "UserList", "collections"),
- MovedAttribute("UserString", "UserString", "collections"),
- MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
- MovedModule("builtins", "__builtin__"),
- MovedModule("configparser", "ConfigParser"),
- MovedModule("copyreg", "copy_reg"),
- MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
- MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
- MovedModule("http_cookies", "Cookie", "http.cookies"),
- MovedModule("html_entities", "htmlentitydefs", "html.entities"),
- MovedModule("html_parser", "HTMLParser", "html.parser"),
- MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
- MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
- MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
- MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
- MovedModule("cPickle", "cPickle", "pickle"),
- MovedModule("queue", "Queue"),
- MovedModule("reprlib", "repr"),
- MovedModule("socketserver", "SocketServer"),
- MovedModule("_thread", "thread", "_thread"),
- MovedModule("tkinter", "Tkinter"),
- MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
- MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
- MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
- MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
- MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
- MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser",
- "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog",
- "tkinter.commondialog"),
- MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_font", "tkFont", "tkinter.font"),
- MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
- "tkinter.simpledialog"),
- MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
- MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
-for attr in _moved_attributes:
- setattr(_MovedItems, attr.name, attr)
- if isinstance(attr, MovedModule):
- _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
- MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
- MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
- MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
- MovedAttribute("urljoin", "urlparse", "urllib.parse"),
- MovedAttribute("urlparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
- MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
- MovedAttribute("quote", "urllib", "urllib.parse"),
- MovedAttribute("quote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote", "urllib", "urllib.parse"),
- MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("urlencode", "urllib", "urllib.parse"),
- MovedAttribute("splitquery", "urllib", "urllib.parse"),
- MovedAttribute("splittag", "urllib", "urllib.parse"),
- MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
- MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
- MovedAttribute("uses_params", "urlparse", "urllib.parse"),
- MovedAttribute("uses_query", "urlparse", "urllib.parse"),
- MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
- setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
- MovedAttribute("URLError", "urllib2", "urllib.error"),
- MovedAttribute("HTTPError", "urllib2", "urllib.error"),
- MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
- setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
- MovedAttribute("urlopen", "urllib2", "urllib.request"),
- MovedAttribute("install_opener", "urllib2", "urllib.request"),
- MovedAttribute("build_opener", "urllib2", "urllib.request"),
- MovedAttribute("pathname2url", "urllib", "urllib.request"),
- MovedAttribute("url2pathname", "urllib", "urllib.request"),
- MovedAttribute("getproxies", "urllib", "urllib.request"),
- MovedAttribute("Request", "urllib2", "urllib.request"),
- MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
- MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
- MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
- MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
- MovedAttribute("FileHandler", "urllib2", "urllib.request"),
- MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
- MovedAttribute("urlretrieve", "urllib", "urllib.request"),
- MovedAttribute("urlcleanup", "urllib", "urllib.request"),
- MovedAttribute("URLopener", "urllib", "urllib.request"),
- MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
- MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
- setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
- MovedAttribute("addbase", "urllib", "urllib.response"),
- MovedAttribute("addclosehook", "urllib", "urllib.response"),
- MovedAttribute("addinfo", "urllib", "urllib.response"),
- MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
- setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
- setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
- """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
- __path__ = [] # mark as package
- parse = _importer._get_module("moves.urllib_parse")
- error = _importer._get_module("moves.urllib_error")
- request = _importer._get_module("moves.urllib_request")
- response = _importer._get_module("moves.urllib_response")
- robotparser = _importer._get_module("moves.urllib_robotparser")
-
- def __dir__(self):
- return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
- "moves.urllib")
-
-
-def add_move(move):
- """Add an item to six.moves."""
- setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
- """Remove item from six.moves."""
- try:
- delattr(_MovedItems, name)
- except AttributeError:
- try:
- del moves.__dict__[name]
- except KeyError:
- raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
- _meth_func = "__func__"
- _meth_self = "__self__"
-
- _func_closure = "__closure__"
- _func_code = "__code__"
- _func_defaults = "__defaults__"
- _func_globals = "__globals__"
-else:
- _meth_func = "im_func"
- _meth_self = "im_self"
-
- _func_closure = "func_closure"
- _func_code = "func_code"
- _func_defaults = "func_defaults"
- _func_globals = "func_globals"
-
-
-try:
- advance_iterator = next
-except NameError:
- def advance_iterator(it):
- return it.next()
-next = advance_iterator
-
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
- def get_unbound_function(unbound):
- return unbound
-
- create_bound_method = types.MethodType
-
- def create_unbound_method(func, cls):
- return func
-
- Iterator = object
-else:
- def get_unbound_function(unbound):
- return unbound.im_func
-
- def create_bound_method(func, obj):
- return types.MethodType(func, obj, obj.__class__)
-
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
- class Iterator(object):
-
- def next(self):
- return type(self).__next__(self)
-
- callable = callable
-_add_doc(get_unbound_function,
- """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
- def iterkeys(d, **kw):
- return iter(d.keys(**kw))
-
- def itervalues(d, **kw):
- return iter(d.values(**kw))
-
- def iteritems(d, **kw):
- return iter(d.items(**kw))
-
- def iterlists(d, **kw):
- return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
-else:
- def iterkeys(d, **kw):
- return d.iterkeys(**kw)
-
- def itervalues(d, **kw):
- return d.itervalues(**kw)
-
- def iteritems(d, **kw):
- return d.iteritems(**kw)
-
- def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
- "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
- "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
- def b(s):
- return s.encode("latin-1")
-
- def u(s):
- return s
- unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
- byte2int = operator.itemgetter(0)
- indexbytes = operator.getitem
- iterbytes = iter
- import io
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
-else:
- def b(s):
- return s
- # Workaround for standalone backslash
-
- def u(s):
- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
- unichr = unichr
- int2byte = chr
-
- def byte2int(bs):
- return ord(bs[0])
-
- def indexbytes(buf, i):
- return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
- import StringIO
- StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
- exec_ = getattr(moves.builtins, "exec")
-
- def reraise(tp, value, tb=None):
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
-else:
- def exec_(_code_, _globs_=None, _locs_=None):
- """Execute code in a namespace."""
- if _globs_ is None:
- frame = sys._getframe(1)
- _globs_ = frame.f_globals
- if _locs_ is None:
- _locs_ = frame.f_locals
- del frame
- elif _locs_ is None:
- _locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
-
- exec_("""def reraise(tp, value, tb=None):
- raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- if from_value is None:
- raise value
- raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
- exec_("""def raise_from(value, from_value):
- raise value from from_value
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
- def print_(*args, **kwargs):
- """The new-style print function for Python 2.4 and 2.5."""
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
-
- def write(data):
- if not isinstance(data, basestring):
- data = str(data)
- # If the file has an encoding, encode unicode with it.
- if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
- errors = getattr(fp, "errors", None)
- if errors is None:
- errors = "strict"
- data = data.encode(fp.encoding, errors)
- fp.write(data)
- want_unicode = False
- sep = kwargs.pop("sep", None)
- if sep is not None:
- if isinstance(sep, unicode):
- want_unicode = True
- elif not isinstance(sep, str):
- raise TypeError("sep must be None or a string")
- end = kwargs.pop("end", None)
- if end is not None:
- if isinstance(end, unicode):
- want_unicode = True
- elif not isinstance(end, str):
- raise TypeError("end must be None or a string")
- if kwargs:
- raise TypeError("invalid keyword arguments to print()")
- if not want_unicode:
- for arg in args:
- if isinstance(arg, unicode):
- want_unicode = True
- break
- if want_unicode:
- newline = unicode("\n")
- space = unicode(" ")
- else:
- newline = "\n"
- space = " "
- if sep is None:
- sep = space
- if end is None:
- end = newline
- for i, arg in enumerate(args):
- if i:
- write(sep)
- write(arg)
- write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
- def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
-else:
- wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
-
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- slots = orig_vars.get('__slots__')
- if slots is not None:
- if isinstance(slots, str):
- slots = [slots]
- for slots_var in slots:
- orig_vars.pop(slots_var)
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
-
-
-def python_2_unicode_compatible(klass):
- """
- A decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = [] # required for PEP 302 and PEP 451
-__package__ = __name__ # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
- __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
- for i, importer in enumerate(sys.meta_path):
- # Here's some real nastiness: Another "instance" of the six module might
- # be floating around. Therefore, we can't use isinstance() to check for
- # the six meta path importer, since the other six instance will have
- # inserted an importer with different class.
- if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
- del sys.meta_path[i]
- break
- del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
deleted file mode 100644
index 46532c0a..00000000
--- a/pkg_resources/_vendor/vendored.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-packaging==16.7
-pyparsing==2.0.6
-six==1.10.0
diff --git a/pkg_resources/api_tests.txt b/pkg_resources/api_tests.txt
deleted file mode 100644
index 4fbd3d23..00000000
--- a/pkg_resources/api_tests.txt
+++ /dev/null
@@ -1,401 +0,0 @@
-Pluggable Distributions of Python Software
-==========================================
-
-Distributions
--------------
-
-A "Distribution" is a collection of files that represent a "Release" of a
-"Project" as of a particular point in time, denoted by a
-"Version"::
-
- >>> import sys, pkg_resources
- >>> from pkg_resources import Distribution
- >>> Distribution(project_name="Foo", version="1.2")
- Foo 1.2
-
-Distributions have a location, which can be a filename, URL, or really anything
-else you care to use::
-
- >>> dist = Distribution(
- ... location="http://example.com/something",
- ... project_name="Bar", version="0.9"
- ... )
-
- >>> dist
- Bar 0.9 (http://example.com/something)
-
-
-Distributions have various introspectable attributes::
-
- >>> dist.location
- 'http://example.com/something'
-
- >>> dist.project_name
- 'Bar'
-
- >>> dist.version
- '0.9'
-
- >>> dist.py_version == sys.version[:3]
- True
-
- >>> print(dist.platform)
- None
-
-Including various computed attributes::
-
- >>> from pkg_resources import parse_version
- >>> dist.parsed_version == parse_version(dist.version)
- True
-
- >>> dist.key # case-insensitive form of the project name
- 'bar'
-
-Distributions are compared (and hashed) by version first::
-
- >>> Distribution(version='1.0') == Distribution(version='1.0')
- True
- >>> Distribution(version='1.0') == Distribution(version='1.1')
- False
- >>> Distribution(version='1.0') < Distribution(version='1.1')
- True
-
-but also by project name (case-insensitive), platform, Python version,
-location, etc.::
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="Foo",version="1.0")
- True
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="foo",version="1.0")
- True
-
- >>> Distribution(project_name="Foo",version="1.0") == \
- ... Distribution(project_name="Foo",version="1.1")
- False
-
- >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
- ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
- False
-
- >>> Distribution(location="spam",version="1.0") == \
- ... Distribution(location="spam",version="1.0")
- True
-
- >>> Distribution(location="spam",version="1.0") == \
- ... Distribution(location="baz",version="1.0")
- False
-
-
-
-Hash and compare distribution by prio/plat
-
-Get version from metadata
-provider capabilities
-egg_name()
-as_requirement()
-from_location, from_filename (w/path normalization)
-
-Releases may have zero or more "Requirements", which indicate
-what releases of another project the release requires in order to
-function. A Requirement names the other project, expresses some criteria
-as to what releases of that project are acceptable, and lists any "Extras"
-that the requiring release may need from that project. (An Extra is an
-optional feature of a Release, that can only be used if its additional
-Requirements are satisfied.)
-
-
-
-The Working Set
----------------
-
-A collection of active distributions is called a Working Set. Note that a
-Working Set can contain any importable distribution, not just pluggable ones.
-For example, the Python standard library is an importable distribution that
-will usually be part of the Working Set, even though it is not pluggable.
-Similarly, when you are doing development work on a project, the files you are
-editing are also a Distribution. (And, with a little attention to the
-directory names used, and including some additional metadata, such a
-"development distribution" can be made pluggable as well.)
-
- >>> from pkg_resources import WorkingSet
-
-A working set's entries are the sys.path entries that correspond to the active
-distributions. By default, the working set's entries are the items on
-``sys.path``::
-
- >>> ws = WorkingSet()
- >>> ws.entries == sys.path
- True
-
-But you can also create an empty working set explicitly, and add distributions
-to it::
-
- >>> ws = WorkingSet([])
- >>> ws.add(dist)
- >>> ws.entries
- ['http://example.com/something']
- >>> dist in ws
- True
- >>> Distribution('foo',version="") in ws
- False
-
-And you can iterate over its distributions::
-
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-Adding the same distribution more than once is a no-op::
-
- >>> ws.add(dist)
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-For that matter, adding multiple distributions for the same project also does
-nothing, because a working set can only hold one active distribution per
-project -- the first one added to it::
-
- >>> ws.add(
- ... Distribution(
- ... 'http://example.com/something', project_name="Bar",
- ... version="7.2"
- ... )
- ... )
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-You can append a path entry to a working set using ``add_entry()``::
-
- >>> ws.entries
- ['http://example.com/something']
- >>> ws.add_entry(pkg_resources.__file__)
- >>> ws.entries
- ['http://example.com/something', '...pkg_resources...']
-
-Multiple additions result in multiple entries, even if the entry is already in
-the working set (because ``sys.path`` can contain the same entry more than
-once)::
-
- >>> ws.add_entry(pkg_resources.__file__)
- >>> ws.entries
- ['...example.com...', '...pkg_resources...', '...pkg_resources...']
-
-And you can specify the path entry a distribution was found under, using the
-optional second parameter to ``add()``::
-
- >>> ws = WorkingSet([])
- >>> ws.add(dist,"foo")
- >>> ws.entries
- ['foo']
-
-But even if a distribution is found under multiple path entries, it still only
-shows up once when iterating the working set:
-
- >>> ws.add_entry(ws.entries[0])
- >>> list(ws)
- [Bar 0.9 (http://example.com/something)]
-
-You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
-
- >>> from pkg_resources import Requirement
- >>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None
- None
-
- >>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
- Bar 0.9 (http://example.com/something)
-
-Note that asking for a conflicting version of a distribution already in a
-working set triggers a ``pkg_resources.VersionConflict`` error:
-
- >>> try:
- ... ws.find(Requirement.parse("Bar==1.0"))
- ... except pkg_resources.VersionConflict as exc:
- ... print(str(exc))
- ... else:
- ... raise AssertionError("VersionConflict was not raised")
- (Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
-
-You can subscribe a callback function to receive notifications whenever a new
-distribution is added to a working set. The callback is immediately invoked
-once for each existing distribution in the working set, and then is called
-again for new distributions added thereafter::
-
- >>> def added(dist): print("Added %s" % dist)
- >>> ws.subscribe(added)
- Added Bar 0.9
- >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
- >>> ws.add(foo12)
- Added Foo 1.2
-
-Note, however, that only the first distribution added for a given project name
-will trigger a callback, even during the initial ``subscribe()`` callback::
-
- >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
- >>> ws.add(foo14) # no callback, because Foo 1.2 is already active
-
- >>> ws = WorkingSet([])
- >>> ws.add(foo12)
- >>> ws.add(foo14)
- >>> ws.subscribe(added)
- Added Foo 1.2
-
-And adding a callback more than once has no effect, either::
-
- >>> ws.subscribe(added) # no callbacks
-
- # and no double-callbacks on subsequent additions, either
- >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
- >>> ws.add(just_a_test)
- Added JustATest 0.99
-
-
-Finding Plugins
----------------
-
-``WorkingSet`` objects can be used to figure out what plugins in an
-``Environment`` can be loaded without any resolution errors::
-
- >>> from pkg_resources import Environment
-
- >>> plugins = Environment([]) # normally, a list of plugin directories
- >>> plugins.add(foo12)
- >>> plugins.add(foo14)
- >>> plugins.add(just_a_test)
-
-In the simplest case, we just get the newest version of each distribution in
-the plugin environment::
-
- >>> ws = WorkingSet([])
- >>> ws.find_plugins(plugins)
- ([JustATest 0.99, Foo 1.4 (f14)], {})
-
-But if there's a problem with a version conflict or missing requirements, the
-method falls back to older versions, and the error info dict will contain an
-exception instance for each unloadable plugin::
-
- >>> ws.add(foo12) # this will conflict with Foo 1.4
- >>> ws.find_plugins(plugins)
- ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
-
-But if you disallow fallbacks, the failed plugin will be skipped instead of
-trying older versions::
-
- >>> ws.find_plugins(plugins, fallback=False)
- ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
-
-
-
-Platform Compatibility Rules
-----------------------------
-
-On the Mac, there are potential compatibility issues for modules compiled
-on newer versions of Mac OS X than what the user is running. Additionally,
-Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
-
-Basic equality works as on other platforms::
-
- >>> from pkg_resources import compatible_platforms as cp
- >>> reqd = 'macosx-10.4-ppc'
- >>> cp(reqd, reqd)
- True
- >>> cp("win32", reqd)
- False
-
-Distributions made on other machine types are not compatible::
-
- >>> cp("macosx-10.4-i386", reqd)
- False
-
-Distributions made on earlier versions of the OS are compatible, as
-long as they are from the same top-level version. The patchlevel version
-number does not matter::
-
- >>> cp("macosx-10.4-ppc", reqd)
- True
- >>> cp("macosx-10.3-ppc", reqd)
- True
- >>> cp("macosx-10.5-ppc", reqd)
- False
- >>> cp("macosx-9.5-ppc", reqd)
- False
-
-Backwards compatibility for packages made via earlier versions of
-setuptools is provided as well::
-
- >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
- True
- >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
- True
- >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
- False
-
-
-Environment Markers
--------------------
-
- >>> from pkg_resources import invalid_marker as im, evaluate_marker as em
- >>> import os
-
- >>> print(im("sys_platform"))
- Invalid marker: 'sys_platform', parse error at ''
-
- >>> print(im("sys_platform=="))
- Invalid marker: 'sys_platform==', parse error at ''
-
- >>> print(im("sys_platform=='win32'"))
- False
-
- >>> print(im("sys=='x'"))
- Invalid marker: "sys=='x'", parse error at "sys=='x'"
-
- >>> print(im("(extra)"))
- Invalid marker: '(extra)', parse error at ')'
-
- >>> print(im("(extra"))
- Invalid marker: '(extra', parse error at ''
-
- >>> print(im("os.open('foo')=='y'"))
- Invalid marker: "os.open('foo')=='y'", parse error at 'os.open('
-
- >>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit!
- Invalid marker: "'x'=='y' and os.open('foo')=='y'", parse error at 'and os.o'
-
- >>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit!
- Invalid marker: "'x'=='x' or os.open('foo')=='y'", parse error at 'or os.op'
-
- >>> print(im("'x' < 'y' < 'z'"))
- Invalid marker: "'x' < 'y' < 'z'", parse error at "< 'z'"
-
- >>> print(im("r'x'=='x'"))
- Invalid marker: "r'x'=='x'", parse error at "r'x'=='x"
-
- >>> print(im("'''x'''=='x'"))
- Invalid marker: "'''x'''=='x'", parse error at "'x'''=='"
-
- >>> print(im('"""x"""=="x"'))
- Invalid marker: '"""x"""=="x"', parse error at '"x"""=="'
-
- >>> print(im(r"x\n=='x'"))
- Invalid marker: "x\\n=='x'", parse error at "x\\n=='x'"
-
- >>> print(im("os.open=='y'"))
- Invalid marker: "os.open=='y'", parse error at 'os.open='
-
- >>> em("sys_platform=='win32'") == (sys.platform=='win32')
- True
-
- >>> em("python_version >= '2.6'")
- True
-
- >>> em("python_version > '2.5'")
- True
-
- >>> im("implementation_name=='cpython'")
- False
-
- >>> im("platform_python_implementation=='CPython'")
- False
-
- >>> im("implementation_version=='3.5.1'")
- False
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
deleted file mode 100644
index 6758d36f..00000000
--- a/pkg_resources/extern/__init__.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import sys
-
-
-class VendorImporter:
- """
- A PEP 302 meta path importer for finding optionally-vendored
- or otherwise naturally-installed packages from root_name.
- """
- def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
- self.root_name = root_name
- self.vendored_names = set(vendored_names)
- self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
-
- @property
- def search_path(self):
- """
- Search first the vendor package then as a natural package.
- """
- yield self.vendor_pkg + '.'
- yield ''
-
- def find_module(self, fullname, path=None):
- """
- Return self when fullname starts with root_name and the
- target module is one vendored through this importer.
- """
- root, base, target = fullname.partition(self.root_name + '.')
- if root:
- return
- if not any(map(target.startswith, self.vendored_names)):
- return
- return self
-
- def load_module(self, fullname):
- """
- Iterate over the search path to locate and load fullname.
- """
- root, base, target = fullname.partition(self.root_name + '.')
- for prefix in self.search_path:
- try:
- extant = prefix + target
- __import__(extant)
- mod = sys.modules[extant]
- sys.modules[fullname] = mod
- # mysterious hack:
- # Remove the reference to the extant package/module
- # on later Python versions to cause relative imports
- # in the vendor package to resolve the same modules
- # as those going through this importer.
- if sys.version_info > (3, 3):
- del sys.modules[extant]
- return mod
- except ImportError:
- pass
- else:
- raise ImportError(
- "The '{target}' package is required; "
- "normally this is bundled with this package so if you get "
- "this warning, consult the packager of your "
- "distribution.".format(**locals())
- )
-
- def install(self):
- """
- Install this importer into sys.meta_path if not already present.
- """
- if self not in sys.meta_path:
- sys.meta_path.append(self)
-
-names = 'packaging', 'pyparsing', 'six'
-VendorImporter(__name__, names).install()
diff --git a/pkg_resources/tests/__init__.py b/pkg_resources/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/pkg_resources/tests/__init__.py
+++ /dev/null
diff --git a/pkg_resources/tests/test_markers.py b/pkg_resources/tests/test_markers.py
deleted file mode 100644
index 8d451de3..00000000
--- a/pkg_resources/tests/test_markers.py
+++ /dev/null
@@ -1,10 +0,0 @@
-try:
- import unittest.mock as mock
-except ImportError:
- import mock
-
-from pkg_resources import evaluate_marker
-
-@mock.patch('platform.python_version', return_value='2.7.10')
-def test_ordering(python_version_mock):
- assert evaluate_marker("python_full_version > '2.7.3'") is True
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
deleted file mode 100644
index 8b276ffc..00000000
--- a/pkg_resources/tests/test_pkg_resources.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
-import sys
-import tempfile
-import os
-import zipfile
-import datetime
-import time
-import subprocess
-import stat
-import distutils.dist
-import distutils.command.install_egg_info
-
-from pkg_resources.extern.six.moves import map
-
-import pytest
-
-import pkg_resources
-
-
-try:
- unicode
-except NameError:
- unicode = str
-
-def timestamp(dt):
- """
- Return a timestamp for a local, naive datetime instance.
- """
- try:
- return dt.timestamp()
- except AttributeError:
- # Python 3.2 and earlier
- return time.mktime(dt.timetuple())
-
-class EggRemover(unicode):
- def __call__(self):
- if self in sys.path:
- sys.path.remove(self)
- if os.path.exists(self):
- os.remove(self)
-
-class TestZipProvider(object):
- finalizers = []
-
- ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
- "A reference time for a file modification"
-
- @classmethod
- def setup_class(cls):
- "create a zip egg and add it to sys.path"
- egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False)
- zip_egg = zipfile.ZipFile(egg, 'w')
- zip_info = zipfile.ZipInfo()
- zip_info.filename = 'mod.py'
- zip_info.date_time = cls.ref_time.timetuple()
- zip_egg.writestr(zip_info, 'x = 3\n')
- zip_info = zipfile.ZipInfo()
- zip_info.filename = 'data.dat'
- zip_info.date_time = cls.ref_time.timetuple()
- zip_egg.writestr(zip_info, 'hello, world!')
- zip_egg.close()
- egg.close()
-
- sys.path.append(egg.name)
- cls.finalizers.append(EggRemover(egg.name))
-
- @classmethod
- def teardown_class(cls):
- for finalizer in cls.finalizers:
- finalizer()
-
- def test_resource_filename_rewrites_on_change(self):
- """
- If a previous call to get_resource_filename has saved the file, but
- the file has been subsequently mutated with different file of the
- same size and modification time, it should not be overwritten on a
- subsequent call to get_resource_filename.
- """
- import mod
- manager = pkg_resources.ResourceManager()
- zp = pkg_resources.ZipProvider(mod)
- filename = zp.get_resource_filename(manager, 'data.dat')
- actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime)
- assert actual == self.ref_time
- f = open(filename, 'w')
- f.write('hello, world?')
- f.close()
- ts = timestamp(self.ref_time)
- os.utime(filename, (ts, ts))
- filename = zp.get_resource_filename(manager, 'data.dat')
- f = open(filename)
- assert f.read() == 'hello, world!'
- manager.cleanup_resources()
-
-class TestResourceManager(object):
- def test_get_cache_path(self):
- mgr = pkg_resources.ResourceManager()
- path = mgr.get_cache_path('foo')
- type_ = str(type(path))
- message = "Unexpected type from get_cache_path: " + type_
- assert isinstance(path, (unicode, str)), message
-
-
-class TestIndependence:
- """
- Tests to ensure that pkg_resources runs independently from setuptools.
- """
- def test_setuptools_not_imported(self):
- """
- In a separate Python environment, import pkg_resources and assert
- that action doesn't cause setuptools to be imported.
- """
- lines = (
- 'import pkg_resources',
- 'import sys',
- 'assert "setuptools" not in sys.modules, '
- '"setuptools was imported"',
- )
- cmd = [sys.executable, '-c', '; '.join(lines)]
- subprocess.check_call(cmd)
-
-
-
-class TestDeepVersionLookupDistutils(object):
-
- @pytest.fixture
- def env(self, tmpdir):
- """
- Create a package environment, similar to a virtualenv,
- in which packages are installed.
- """
- class Environment(str):
- pass
-
- env = Environment(tmpdir)
- tmpdir.chmod(stat.S_IRWXU)
- subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
- env.paths = dict(
- (dirname, str(tmpdir / dirname))
- for dirname in subs
- )
- list(map(os.mkdir, env.paths.values()))
- return env
-
- def create_foo_pkg(self, env, version):
- """
- Create a foo package installed (distutils-style) to env.paths['lib']
- as version.
- """
- ld = "This package has unicode metadata! ❄"
- attrs = dict(name='foo', version=version, long_description=ld)
- dist = distutils.dist.Distribution(attrs)
- iei_cmd = distutils.command.install_egg_info.install_egg_info(dist)
- iei_cmd.initialize_options()
- iei_cmd.install_dir = env.paths['lib']
- iei_cmd.finalize_options()
- iei_cmd.run()
-
- def test_version_resolved_from_egg_info(self, env):
- version = '1.11.0.dev0+2329eae'
- self.create_foo_pkg(env, version)
-
- # this requirement parsing will raise a VersionConflict unless the
- # .egg-info file is parsed (see #419 on BitBucket)
- req = pkg_resources.Requirement.parse('foo>=1.9')
- dist = pkg_resources.WorkingSet([env.paths['lib']]).find(req)
- assert dist.version == version
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
deleted file mode 100644
index 31847dc8..00000000
--- a/pkg_resources/tests/test_resources.py
+++ /dev/null
@@ -1,834 +0,0 @@
-from __future__ import unicode_literals
-
-import os
-import sys
-import string
-
-from pkg_resources.extern.six.moves import map
-
-import pytest
-from pkg_resources.extern import packaging
-
-import pkg_resources
-from pkg_resources import (parse_requirements, VersionConflict, parse_version,
- Distribution, EntryPoint, Requirement, safe_version, safe_name,
- WorkingSet)
-
-
-class Metadata(pkg_resources.EmptyProvider):
- """Mock object to return metadata as if from an on-disk distribution"""
-
- def __init__(self, *pairs):
- self.metadata = dict(pairs)
-
- def has_metadata(self, name):
- return name in self.metadata
-
- def get_metadata(self, name):
- return self.metadata[name]
-
- def get_metadata_lines(self, name):
- return pkg_resources.yield_lines(self.get_metadata(name))
-
-
-dist_from_fn = pkg_resources.Distribution.from_filename
-
-class TestDistro:
-
- def testCollection(self):
- # empty path should produce no distributions
- ad = pkg_resources.Environment([], platform=None, python=None)
- assert list(ad) == []
- assert ad['FooPkg'] == []
- ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
- ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
- ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
-
- # Name is in there now
- assert ad['FooPkg']
- # But only 1 package
- assert list(ad) == ['foopkg']
-
- # Distributions sort by version
- assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2']
-
- # Removing a distribution leaves sequence alone
- ad.remove(ad['FooPkg'][1])
- assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2']
-
- # And inserting adds them in order
- ad.add(dist_from_fn("FooPkg-1.9.egg"))
- assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2']
-
- ws = WorkingSet([])
- foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
- foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
- req, = parse_requirements("FooPkg>=1.3")
-
- # Nominal case: no distros on path, should yield all applicable
- assert ad.best_match(req, ws).version == '1.9'
- # If a matching distro is already installed, should return only that
- ws.add(foo14)
- assert ad.best_match(req, ws).version == '1.4'
-
- # If the first matching distro is unsuitable, it's a version conflict
- ws = WorkingSet([])
- ws.add(foo12)
- ws.add(foo14)
- with pytest.raises(VersionConflict):
- ad.best_match(req, ws)
-
- # If more than one match on the path, the first one takes precedence
- ws = WorkingSet([])
- ws.add(foo14)
- ws.add(foo12)
- ws.add(foo14)
- assert ad.best_match(req, ws).version == '1.4'
-
- def checkFooPkg(self,d):
- assert d.project_name == "FooPkg"
- assert d.key == "foopkg"
- assert d.version == "1.3.post1"
- assert d.py_version == "2.4"
- assert d.platform == "win32"
- assert d.parsed_version == parse_version("1.3-1")
-
- def testDistroBasics(self):
- d = Distribution(
- "/some/path",
- project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
- )
- self.checkFooPkg(d)
-
- d = Distribution("/some/path")
- assert d.py_version == sys.version[:3]
- assert d.platform == None
-
- def testDistroParse(self):
- d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg")
- self.checkFooPkg(d)
- d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info")
- self.checkFooPkg(d)
-
- def testDistroMetadata(self):
- d = Distribution(
- "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
- metadata = Metadata(
- ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
- )
- )
- self.checkFooPkg(d)
-
- def distRequires(self, txt):
- return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
-
- def checkRequires(self, dist, txt, extras=()):
- assert list(dist.requires(extras)) == list(parse_requirements(txt))
-
- def testDistroDependsSimple(self):
- for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
- self.checkRequires(self.distRequires(v), v)
-
- def testResolve(self):
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- # Resolving no requirements -> nothing to install
- assert list(ws.resolve([], ad)) == []
- # Request something not in the collection -> DistributionNotFound
- with pytest.raises(pkg_resources.DistributionNotFound):
- ws.resolve(parse_requirements("Foo"), ad)
-
- Foo = Distribution.from_filename(
- "/foo_dir/Foo-1.2.egg",
- metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
- )
- ad.add(Foo)
- ad.add(Distribution.from_filename("Foo-0.9.egg"))
-
- # Request thing(s) that are available -> list to activate
- for i in range(3):
- targets = list(ws.resolve(parse_requirements("Foo"), ad))
- assert targets == [Foo]
- list(map(ws.add, targets))
- with pytest.raises(VersionConflict):
- ws.resolve(parse_requirements("Foo==0.9"), ad)
- ws = WorkingSet([]) # reset
-
- # Request an extra that causes an unresolved dependency for "Baz"
- with pytest.raises(pkg_resources.DistributionNotFound):
- ws.resolve(parse_requirements("Foo[bar]"), ad)
- Baz = Distribution.from_filename(
- "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
- )
- ad.add(Baz)
-
- # Activation list now includes resolved dependency
- assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) ==[Foo,Baz]
- # Requests for conflicting versions produce VersionConflict
- with pytest.raises(VersionConflict) as vc:
- ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
-
- msg = 'Foo 0.9 is installed but Foo==1.2 is required'
- assert vc.value.report() == msg
-
- def test_environment_marker_evaluation_negative(self):
- """Environment markers are evaluated at resolution time."""
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad)
- assert list(res) == []
-
- def test_environment_marker_evaluation_positive(self):
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
- ad.add(Foo)
- res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
- assert list(res) == [Foo]
-
- def test_environment_marker_evaluation_called(self):
- """
- If one package foo requires bar without any extras,
- markers should pass for bar without extras.
- """
- parent_req, = parse_requirements("foo")
- req, = parse_requirements("bar;python_version>='2'")
- req_extras = pkg_resources._ReqExtras({req: parent_req.extras})
- assert req_extras.markers_pass(req)
-
- parent_req, = parse_requirements("foo[]")
- req, = parse_requirements("bar;python_version>='2'")
- req_extras = pkg_resources._ReqExtras({req: parent_req.extras})
- assert req_extras.markers_pass(req)
-
- def test_marker_evaluation_with_extras(self):
- """Extras are also evaluated as markers at resolution time."""
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- # Metadata needs to be native strings due to cStringIO behaviour in
- # 2.6, so use str().
- Foo = Distribution.from_filename(
- "/foo_dir/Foo-1.2.dist-info",
- metadata=Metadata(("METADATA", str("Provides-Extra: baz\n"
- "Requires-Dist: quux; extra=='baz'")))
- )
- ad.add(Foo)
- assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
- quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
- ad.add(quux)
- res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
- assert res == [Foo,quux]
-
- def test_marker_evaluation_with_multiple_extras(self):
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- # Metadata needs to be native strings due to cStringIO behaviour in
- # 2.6, so use str().
- Foo = Distribution.from_filename(
- "/foo_dir/Foo-1.2.dist-info",
- metadata=Metadata(("METADATA", str("Provides-Extra: baz\n"
- "Requires-Dist: quux; extra=='baz'\n"
- "Provides-Extra: bar\n"
- "Requires-Dist: fred; extra=='bar'\n")))
- )
- ad.add(Foo)
- quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
- ad.add(quux)
- fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
- ad.add(fred)
- res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
- assert sorted(res) == [fred,quux,Foo]
-
- def test_marker_evaluation_with_extras_loop(self):
- ad = pkg_resources.Environment([])
- ws = WorkingSet([])
- # Metadata needs to be native strings due to cStringIO behaviour in
- # 2.6, so use str().
- a = Distribution.from_filename(
- "/foo_dir/a-0.2.dist-info",
- metadata=Metadata(("METADATA", str("Requires-Dist: c[a]")))
- )
- b = Distribution.from_filename(
- "/foo_dir/b-0.3.dist-info",
- metadata=Metadata(("METADATA", str("Requires-Dist: c[b]")))
- )
- c = Distribution.from_filename(
- "/foo_dir/c-1.0.dist-info",
- metadata=Metadata(("METADATA", str("Provides-Extra: a\n"
- "Requires-Dist: b;extra=='a'\n"
- "Provides-Extra: b\n"
- "Requires-Dist: foo;extra=='b'")))
- )
- foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
- for dist in (a, b, c, foo):
- ad.add(dist)
- res = list(ws.resolve(parse_requirements("a"), ad))
- assert res == [a, c, b, foo]
-
- def testDistroDependsOptions(self):
- d = self.distRequires("""
- Twisted>=1.5
- [docgen]
- ZConfig>=2.0
- docutils>=0.3
- [fastcgi]
- fcgiapp>=0.1""")
- self.checkRequires(d,"Twisted>=1.5")
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
- ["docgen","fastcgi"]
- )
- self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
- ["fastcgi", "docgen"]
- )
- with pytest.raises(pkg_resources.UnknownExtra):
- d.requires(["foo"])
-
-
-class TestWorkingSet:
- def test_find_conflicting(self):
- ws = WorkingSet([])
- Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
- ws.add(Foo)
-
- # create a requirement that conflicts with Foo 1.2
- req = next(parse_requirements("Foo<1.2"))
-
- with pytest.raises(VersionConflict) as vc:
- ws.find(req)
-
- msg = 'Foo 1.2 is installed but Foo<1.2 is required'
- assert vc.value.report() == msg
-
- def test_resolve_conflicts_with_prior(self):
- """
- A ContextualVersionConflict should be raised when a requirement
- conflicts with a prior requirement for a different package.
- """
- # Create installation where Foo depends on Baz 1.0 and Bar depends on
- # Baz 2.0.
- ws = WorkingSet([])
- md = Metadata(('depends.txt', "Baz==1.0"))
- Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
- ws.add(Foo)
- md = Metadata(('depends.txt', "Baz==2.0"))
- Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
- ws.add(Bar)
- Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
- ws.add(Baz)
- Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
- ws.add(Baz)
-
- with pytest.raises(VersionConflict) as vc:
- ws.resolve(parse_requirements("Foo\nBar\n"))
-
- msg = "Baz 1.0 is installed but Baz==2.0 is required by "
- msg += repr(set(['Bar']))
- assert vc.value.report() == msg
-
-
-class TestEntryPoints:
-
- def assertfields(self, ep):
- assert ep.name == "foo"
- assert ep.module_name == "pkg_resources.tests.test_resources"
- assert ep.attrs == ("TestEntryPoints",)
- assert ep.extras == ("x",)
- assert ep.load() is TestEntryPoints
- expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
- assert str(ep) == expect
-
- def setup_method(self, method):
- self.dist = Distribution.from_filename(
- "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
-
- def testBasics(self):
- ep = EntryPoint(
- "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"],
- ["x"], self.dist
- )
- self.assertfields(ep)
-
- def testParse(self):
- s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
- ep = EntryPoint.parse(s, self.dist)
- self.assertfields(ep)
-
- ep = EntryPoint.parse("bar baz= spammity[PING]")
- assert ep.name == "bar baz"
- assert ep.module_name == "spammity"
- assert ep.attrs == ()
- assert ep.extras == ("ping",)
-
- ep = EntryPoint.parse(" fizzly = wocka:foo")
- assert ep.name == "fizzly"
- assert ep.module_name == "wocka"
- assert ep.attrs == ("foo",)
- assert ep.extras == ()
-
- # plus in the name
- spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer"
- ep = EntryPoint.parse(spec)
- assert ep.name == 'html+mako'
-
- reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2"
- @pytest.mark.parametrize("reject_spec", reject_specs)
- def test_reject_spec(self, reject_spec):
- with pytest.raises(ValueError):
- EntryPoint.parse(reject_spec)
-
- def test_printable_name(self):
- """
- Allow any printable character in the name.
- """
- # Create a name with all printable characters; strip the whitespace.
- name = string.printable.strip()
- spec = "{name} = module:attr".format(**locals())
- ep = EntryPoint.parse(spec)
- assert ep.name == name
-
- def checkSubMap(self, m):
- assert len(m) == len(self.submap_expect)
- for key, ep in self.submap_expect.items():
- assert m.get(key).name == ep.name
- assert m.get(key).module_name == ep.module_name
- assert sorted(m.get(key).attrs) == sorted(ep.attrs)
- assert sorted(m.get(key).extras) == sorted(ep.extras)
-
- submap_expect = dict(
- feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
- feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
- feature3=EntryPoint('feature3', 'this.module', extras=['something'])
- )
- submap_str = """
- # define features for blah blah
- feature1 = somemodule:somefunction
- feature2 = another.module:SomeClass [extra1,extra2]
- feature3 = this.module [something]
- """
-
- def testParseList(self):
- self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
- with pytest.raises(ValueError):
- EntryPoint.parse_group("x a", "foo=bar")
- with pytest.raises(ValueError):
- EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])
-
- def testParseMap(self):
- m = EntryPoint.parse_map({'xyz':self.submap_str})
- self.checkSubMap(m['xyz'])
- assert list(m.keys()) == ['xyz']
- m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
- self.checkSubMap(m['xyz'])
- assert list(m.keys()) == ['xyz']
- with pytest.raises(ValueError):
- EntryPoint.parse_map(["[xyz]", "[xyz]"])
- with pytest.raises(ValueError):
- EntryPoint.parse_map(self.submap_str)
-
-class TestRequirements:
-
- def testBasics(self):
- r = Requirement.parse("Twisted>=1.2")
- assert str(r) == "Twisted>=1.2"
- assert repr(r) == "Requirement.parse('Twisted>=1.2')"
- assert r == Requirement("Twisted>=1.2")
- assert r == Requirement("twisTed>=1.2")
- assert r != Requirement("Twisted>=2.0")
- assert r != Requirement("Zope>=1.2")
- assert r != Requirement("Zope>=3.0")
- assert r != Requirement("Twisted[extras]>=1.2")
-
- def testOrdering(self):
- r1 = Requirement("Twisted==1.2c1,>=1.2")
- r2 = Requirement("Twisted>=1.2,==1.2c1")
- assert r1 == r2
- assert str(r1) == str(r2)
- assert str(r2) == "Twisted==1.2c1,>=1.2"
-
- def testBasicContains(self):
- r = Requirement("Twisted>=1.2")
- foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
- twist11 = Distribution.from_filename("Twisted-1.1.egg")
- twist12 = Distribution.from_filename("Twisted-1.2.egg")
- assert parse_version('1.2') in r
- assert parse_version('1.1') not in r
- assert '1.2' in r
- assert '1.1' not in r
- assert foo_dist not in r
- assert twist11 not in r
- assert twist12 in r
-
- def testOptionsAndHashing(self):
- r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
- r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
- assert r1 == r2
- assert set(r1.extras) == set(("foo", "bar"))
- assert set(r2.extras) == set(("foo", "bar"))
- assert hash(r1) == hash(r2)
- assert (
- hash(r1)
- ==
- hash((
- "twisted",
- packaging.specifiers.SpecifierSet(">=1.2"),
- frozenset(["foo","bar"]),
- None
- ))
- )
-
- def testVersionEquality(self):
- r1 = Requirement.parse("foo==0.3a2")
- r2 = Requirement.parse("foo!=0.3a4")
- d = Distribution.from_filename
-
- assert d("foo-0.3a4.egg") not in r1
- assert d("foo-0.3a1.egg") not in r1
- assert d("foo-0.3a4.egg") not in r2
-
- assert d("foo-0.3a2.egg") in r1
- assert d("foo-0.3a2.egg") in r2
- assert d("foo-0.3a3.egg") in r2
- assert d("foo-0.3a5.egg") in r2
-
- def testSetuptoolsProjectName(self):
- """
- The setuptools project should implement the setuptools package.
- """
-
- assert (
- Requirement.parse('setuptools').project_name == 'setuptools')
- # setuptools 0.7 and higher means setuptools.
- assert (
- Requirement.parse('setuptools == 0.7').project_name == 'setuptools')
- assert (
- Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools')
- assert (
- Requirement.parse('setuptools >= 0.7').project_name == 'setuptools')
-
-
-class TestParsing:
-
- def testEmptyParse(self):
- assert list(parse_requirements('')) == []
-
- def testYielding(self):
- for inp,out in [
- ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
- (['x\n\n','y'], ['x','y']),
- ]:
- assert list(pkg_resources.yield_lines(inp)) == out
-
- def testSplitting(self):
- sample = """
- x
- [Y]
- z
-
- a
- [b ]
- # foo
- c
- [ d]
- [q]
- v
- """
- assert (
- list(pkg_resources.split_sections(sample))
- ==
- [
- (None, ["x"]),
- ("Y", ["z", "a"]),
- ("b", ["c"]),
- ("d", []),
- ("q", ["v"]),
- ]
- )
- with pytest.raises(ValueError):
- list(pkg_resources.split_sections("[foo"))
-
- def testSafeName(self):
- assert safe_name("adns-python") == "adns-python"
- assert safe_name("WSGI Utils") == "WSGI-Utils"
- assert safe_name("WSGI Utils") == "WSGI-Utils"
- assert safe_name("Money$$$Maker") == "Money-Maker"
- assert safe_name("peak.web") != "peak-web"
-
- def testSafeVersion(self):
- assert safe_version("1.2-1") == "1.2.post1"
- assert safe_version("1.2 alpha") == "1.2.alpha"
- assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
- assert safe_version("Money$$$Maker") == "Money-Maker"
- assert safe_version("peak.web") == "peak.web"
-
- def testSimpleRequirements(self):
- assert (
- list(parse_requirements('Twis-Ted>=1.2-1'))
- ==
- [Requirement('Twis-Ted>=1.2-1')]
- )
- assert (
- list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
- ==
- [Requirement('Twisted>=1.2,<2.0')]
- )
- assert (
- Requirement.parse("FooBar==1.99a3")
- ==
- Requirement("FooBar==1.99a3")
- )
- with pytest.raises(ValueError):
- Requirement.parse(">=2.3")
- with pytest.raises(ValueError):
- Requirement.parse("x\\")
- with pytest.raises(ValueError):
- Requirement.parse("x==2 q")
- with pytest.raises(ValueError):
- Requirement.parse("X==1\nY==2")
- with pytest.raises(ValueError):
- Requirement.parse("#")
-
- def test_requirements_with_markers(self):
- assert (
- Requirement.parse("foobar;os_name=='a'")
- ==
- Requirement.parse("foobar;os_name=='a'")
- )
- assert (
- Requirement.parse("name==1.1;python_version=='2.7'")
- !=
- Requirement.parse("name==1.1;python_version=='3.3'")
- )
- assert (
- Requirement.parse("name==1.0;python_version=='2.7'")
- !=
- Requirement.parse("name==1.2;python_version=='2.7'")
- )
- assert (
- Requirement.parse("name[foo]==1.0;python_version=='3.3'")
- !=
- Requirement.parse("name[foo,bar]==1.0;python_version=='3.3'")
- )
-
- def test_local_version(self):
- req, = parse_requirements('foo==1.0.org1')
-
- def test_spaces_between_multiple_versions(self):
- req, = parse_requirements('foo>=1.0, <3')
- req, = parse_requirements('foo >= 1.0, < 3')
-
- def testVersionEquality(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- assert p1 == p2, (s1,s2,p1,p2)
-
- c('1.2-rc1', '1.2rc1')
- c('0.4', '0.4.0')
- c('0.4.0.0', '0.4.0')
- c('0.4.0-0', '0.4-0')
- c('0post1', '0.0post1')
- c('0pre1', '0.0c1')
- c('0.0.0preview1', '0c1')
- c('0.0c1', '0-rc1')
- c('1.2a1', '1.2.a.1')
- c('1.2.a', '1.2a')
-
- def testVersionOrdering(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- assert p1<p2, (s1,s2,p1,p2)
-
- c('2.1','2.1.1')
- c('2a1','2b0')
- c('2a1','2.1')
- c('2.3a1', '2.3')
- c('2.1-1', '2.1-2')
- c('2.1-1', '2.1.1')
- c('2.1', '2.1post4')
- c('2.1a0-20040501', '2.1')
- c('1.1', '02.1')
- c('3.2', '3.2.post0')
- c('3.2post1', '3.2post2')
- c('0.4', '4.0')
- c('0.0.4', '0.4.0')
- c('0post1', '0.4post1')
- c('2.1.0-rc1','2.1.0')
- c('2.1dev','2.1a0')
-
- torture ="""
- 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
- 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
- 0.77.2-1 0.77.1-1 0.77.0-1
- """.split()
-
- for p,v1 in enumerate(torture):
- for v2 in torture[p+1:]:
- c(v2,v1)
-
- def testVersionBuildout(self):
- """
- Buildout has a function in it's bootstrap.py that inspected the return
- value of parse_version. The new parse_version returns a Version class
- which needs to support this behavior, at least for now.
- """
- def buildout(parsed_version):
- _final_parts = '*final-', '*final'
-
- def _final_version(parsed_version):
- for part in parsed_version:
- if (part[:1] == '*') and (part not in _final_parts):
- return False
- return True
- return _final_version(parsed_version)
-
- assert buildout(parse_version("1.0"))
- assert not buildout(parse_version("1.0a1"))
-
- def testVersionIndexable(self):
- """
- Some projects were doing things like parse_version("v")[0], so we'll
- support indexing the same as we support iterating.
- """
- assert parse_version("1.0")[0] == "00000001"
-
- def testVersionTupleSort(self):
- """
- Some projects expected to be able to sort tuples against the return
- value of parse_version. So again we'll add a warning enabled shim to
- make this possible.
- """
- assert parse_version("1.0") < tuple(parse_version("2.0"))
- assert parse_version("1.0") <= tuple(parse_version("2.0"))
- assert parse_version("1.0") == tuple(parse_version("1.0"))
- assert parse_version("3.0") > tuple(parse_version("2.0"))
- assert parse_version("3.0") >= tuple(parse_version("2.0"))
- assert parse_version("3.0") != tuple(parse_version("2.0"))
- assert not (parse_version("3.0") != tuple(parse_version("3.0")))
-
- def testVersionHashable(self):
- """
- Ensure that our versions stay hashable even though we've subclassed
- them and added some shim code to them.
- """
- assert (
- hash(parse_version("1.0"))
- ==
- hash(parse_version("1.0"))
- )
-
-
-class TestNamespaces:
-
- ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
-
- @pytest.yield_fixture
- def symlinked_tmpdir(self, tmpdir):
- """
- Where available, return the tempdir as a symlink,
- which as revealed in #231 is more fragile than
- a natural tempdir.
- """
- if not hasattr(os, 'symlink'):
- yield str(tmpdir)
- return
-
- link_name = str(tmpdir) + '-linked'
- os.symlink(str(tmpdir), link_name)
- try:
- yield type(tmpdir)(link_name)
- finally:
- os.unlink(link_name)
-
- @pytest.yield_fixture(autouse=True)
- def patched_path(self, tmpdir):
- """
- Patch sys.path to include the 'site-pkgs' dir. Also
- restore pkg_resources._namespace_packages to its
- former state.
- """
- saved_ns_pkgs = pkg_resources._namespace_packages.copy()
- saved_sys_path = sys.path[:]
- site_pkgs = tmpdir.mkdir('site-pkgs')
- sys.path.append(str(site_pkgs))
- try:
- yield
- finally:
- pkg_resources._namespace_packages = saved_ns_pkgs
- sys.path = saved_sys_path
-
- def test_two_levels_deep(self, symlinked_tmpdir):
- """
- Test nested namespace packages
- Create namespace packages in the following tree :
- site-packages-1/pkg1/pkg2
- site-packages-2/pkg1/pkg2
- Check both are in the _namespace_packages dict and that their __path__
- is correct
- """
- real_tmpdir = symlinked_tmpdir.realpath()
- tmpdir = symlinked_tmpdir
- sys.path.append(str(tmpdir / 'site-pkgs2'))
- site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2'
- for site in site_dirs:
- pkg1 = site / 'pkg1'
- pkg2 = pkg1 / 'pkg2'
- pkg2.ensure_dir()
- (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
- (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
- import pkg1
- assert "pkg1" in pkg_resources._namespace_packages
- # attempt to import pkg2 from site-pkgs2
- import pkg1.pkg2
- # check the _namespace_packages dict
- assert "pkg1.pkg2" in pkg_resources._namespace_packages
- assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
- # check the __path__ attribute contains both paths
- expected = [
- str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"),
- str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"),
- ]
- assert pkg1.pkg2.__path__ == expected
-
- def test_path_order(self, symlinked_tmpdir):
- """
- Test that if multiple versions of the same namespace package subpackage
- are on different sys.path entries, that only the one earliest on
- sys.path is imported, and that the namespace package's __path__ is in
- the correct order.
-
- Regression test for https://github.com/pypa/setuptools/issues/207
- """
-
- tmpdir = symlinked_tmpdir
- site_dirs = (
- tmpdir / "site-pkgs",
- tmpdir / "site-pkgs2",
- tmpdir / "site-pkgs3",
- )
-
- vers_str = "__version__ = %r"
-
- for number, site in enumerate(site_dirs, 1):
- if number > 1:
- sys.path.append(str(site))
- nspkg = site / 'nspkg'
- subpkg = nspkg / 'subpkg'
- subpkg.ensure_dir()
- (nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8')
- (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8')
-
- import nspkg.subpkg
- import nspkg
- expected = [
- str(site.realpath() / 'nspkg')
- for site in site_dirs
- ]
- assert nspkg.__path__ == expected
- assert nspkg.subpkg.__version__ == 1
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100755
index 2fa3a3ec..00000000
--- a/pytest.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[pytest]
-addopts=--doctest-modules --ignore release.py --ignore setuptools/lib2to3_ex.py --ignore tests/manual_test.py --ignore tests/shlib_test --doctest-glob=pkg_resources/api_tests.txt --ignore scripts/upload-old-releases-as-zip.py --ignore pavement.py
-norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100755
index 775dbda2..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,32 +0,0 @@
-[bumpversion]
-current_version = 20.10.1
-commit = True
-tag = True
-
-[egg_info]
-tag_build = .post
-tag_date = 1
-
-[aliases]
-clean_egg_info = egg_info -RDb ''
-release = clean_egg_info sdist bdist_wheel build_sphinx
-source = register sdist binary
-binary = bdist_egg upload --show-response
-test = pytest
-
-[build_sphinx]
-source-dir = docs/
-build-dir = docs/build
-all_files = 1
-
-[upload_docs]
-upload-dir = docs/build/html
-
-[sdist]
-formats = gztar zip
-
-[wheel]
-universal = 1
-
-[bumpversion:file:setup.py]
-
diff --git a/setup.py b/setup.py
deleted file mode 100755
index f72ed3a1..00000000
--- a/setup.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python
-"""
-Distutils setup file, used to install or test 'setuptools'
-"""
-
-import io
-import os
-import sys
-import textwrap
-
-# Allow to run setup.py from another directory.
-os.chdir(os.path.dirname(os.path.abspath(__file__)))
-
-src_root = None
-
-from distutils.util import convert_path
-
-command_ns = {}
-init_path = convert_path('setuptools/command/__init__.py')
-with open(init_path) as init_file:
- exec(init_file.read(), command_ns)
-
-SETUP_COMMANDS = command_ns['__all__']
-
-import setuptools
-
-scripts = []
-
-def _gen_console_scripts():
- yield "easy_install = setuptools.command.easy_install:main"
-
- # Gentoo distributions manage the python-version-specific scripts
- # themselves, so those platforms define an environment variable to
- # suppress the creation of the version-specific scripts.
- var_names = (
- 'SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT',
- 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT',
- )
- if any(os.environ.get(var) not in (None, "", "0") for var in var_names):
- return
- yield ("easy_install-{shortver} = setuptools.command.easy_install:main"
- .format(shortver=sys.version[:3]))
-
-console_scripts = list(_gen_console_scripts())
-
-readme_file = io.open('README.rst', encoding='utf-8')
-
-with readme_file:
- long_description = readme_file.read()
-
-package_data = {
- 'setuptools': ['script (dev).tmpl', 'script.tmpl', 'site-patch.py']}
-force_windows_specific_files = (
- os.environ.get("SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES")
- not in (None, "", "0")
-)
-if (sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt')) \
- or force_windows_specific_files:
- package_data.setdefault('setuptools', []).extend(['*.exe'])
- package_data.setdefault('setuptools.command', []).extend(['*.xml'])
-
-needs_pytest = set(['ptr', 'pytest', 'test']).intersection(sys.argv)
-pytest_runner = ['pytest-runner'] if needs_pytest else []
-needs_sphinx = set(['build_sphinx', 'upload_docs', 'release']).intersection(sys.argv)
-sphinx = ['sphinx', 'rst.linker>=1.5'] if needs_sphinx else []
-needs_wheel = set(['release', 'bdist_wheel']).intersection(sys.argv)
-wheel = ['wheel'] if needs_wheel else []
-
-setup_params = dict(
- name="setuptools",
- version="20.10.1",
- description="Easily download, build, install, upgrade, and uninstall "
- "Python packages",
- author="Python Packaging Authority",
- author_email="distutils-sig@python.org",
- long_description=long_description,
- keywords="CPAN PyPI distutils eggs package management",
- url="https://github.com/pypa/setuptools",
- src_root=src_root,
- packages=setuptools.find_packages(exclude=['*.tests']),
- package_data=package_data,
-
- py_modules=['easy_install'],
-
- zip_safe=True,
-
- entry_points={
- "distutils.commands": [
- "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
- for cmd in SETUP_COMMANDS
- ],
- "distutils.setup_keywords": [
- "eager_resources = setuptools.dist:assert_string_list",
- "namespace_packages = setuptools.dist:check_nsp",
- "extras_require = setuptools.dist:check_extras",
- "install_requires = setuptools.dist:check_requirements",
- "tests_require = setuptools.dist:check_requirements",
- "setup_requires = setuptools.dist:check_requirements",
- "entry_points = setuptools.dist:check_entry_points",
- "test_suite = setuptools.dist:check_test_suite",
- "zip_safe = setuptools.dist:assert_bool",
- "package_data = setuptools.dist:check_package_data",
- "exclude_package_data = setuptools.dist:check_package_data",
- "include_package_data = setuptools.dist:assert_bool",
- "packages = setuptools.dist:check_packages",
- "dependency_links = setuptools.dist:assert_string_list",
- "test_loader = setuptools.dist:check_importable",
- "test_runner = setuptools.dist:check_importable",
- "use_2to3 = setuptools.dist:assert_bool",
- "convert_2to3_doctests = setuptools.dist:assert_string_list",
- "use_2to3_fixers = setuptools.dist:assert_string_list",
- "use_2to3_exclude_fixers = setuptools.dist:assert_string_list",
- ],
- "egg_info.writers": [
- "PKG-INFO = setuptools.command.egg_info:write_pkg_info",
- "requires.txt = setuptools.command.egg_info:write_requirements",
- "entry_points.txt = setuptools.command.egg_info:write_entries",
- "eager_resources.txt = setuptools.command.egg_info:overwrite_arg",
- "namespace_packages.txt = setuptools.command.egg_info:overwrite_arg",
- "top_level.txt = setuptools.command.egg_info:write_toplevel_names",
- "depends.txt = setuptools.command.egg_info:warn_depends_obsolete",
- "dependency_links.txt = setuptools.command.egg_info:overwrite_arg",
- ],
- "console_scripts": console_scripts,
-
- "setuptools.installation":
- ['eggsecutable = setuptools.command.easy_install:bootstrap'],
- },
-
-
- classifiers=textwrap.dedent("""
- Development Status :: 5 - Production/Stable
- Intended Audience :: Developers
- License :: OSI Approved :: MIT License
- Operating System :: OS Independent
- Programming Language :: Python :: 2.6
- Programming Language :: Python :: 2.7
- Programming Language :: Python :: 3
- Programming Language :: Python :: 3.3
- Programming Language :: Python :: 3.4
- Programming Language :: Python :: 3.5
- Topic :: Software Development :: Libraries :: Python Modules
- Topic :: System :: Archiving :: Packaging
- Topic :: System :: Systems Administration
- Topic :: Utilities
- """).strip().splitlines(),
- extras_require={
- "ssl:sys_platform=='win32'": "wincertstore==0.2",
- "certs": "certifi==2016.2.28",
- },
- dependency_links=[
- 'https://pypi.python.org/packages/source/c/certifi/certifi-2016.2.28.tar.gz#md5=5d672aa766e1f773c75cfeccd02d3650',
- 'https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2',
- ],
- scripts=[],
- tests_require=[
- 'setuptools[ssl]',
- 'pytest>=2.8',
- ] + (['mock'] if sys.version_info[:2] < (3, 3) else []),
- setup_requires=[
- ] + sphinx + pytest_runner + wheel,
-)
-
-if __name__ == '__main__':
- dist = setuptools.setup(**setup_params)
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
deleted file mode 100644
index 67b57e4f..00000000
--- a/setuptools/__init__.py
+++ /dev/null
@@ -1,169 +0,0 @@
-"""Extensions to the 'distutils' for large or complex distributions"""
-
-import os
-import functools
-import distutils.core
-import distutils.filelist
-from distutils.core import Command as _Command
-from distutils.util import convert_path
-from fnmatch import fnmatchcase
-
-from setuptools.extern.six.moves import filterfalse, map
-
-import setuptools.version
-from setuptools.extension import Extension
-from setuptools.dist import Distribution, Feature, _get_unpatched
-from setuptools.depends import Require
-
-__all__ = [
- 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
- 'find_packages'
-]
-
-__version__ = setuptools.version.__version__
-
-bootstrap_install_from = None
-
-# If we run 2to3 on .py files, should we also convert docstrings?
-# Default: yes; assume that we can detect doctests reliably
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
-
-
-class PackageFinder(object):
- @classmethod
- def find(cls, where='.', exclude=(), include=('*',)):
- """Return a list all Python packages found within directory 'where'
-
- 'where' should be supplied as a "cross-platform" (i.e. URL-style)
- path; it will be converted to the appropriate local path syntax.
- 'exclude' is a sequence of package names to exclude; '*' can be used
- as a wildcard in the names, such that 'foo.*' will exclude all
- subpackages of 'foo' (but not 'foo' itself).
-
- 'include' is a sequence of package names to include. If it's
- specified, only the named packages will be included. If it's not
- specified, all found packages will be included. 'include' can contain
- shell style wildcard patterns just like 'exclude'.
-
- The list of included packages is built up first and then any
- explicitly excluded packages are removed from it.
- """
- out = cls._find_packages_iter(convert_path(where))
- out = cls.require_parents(out)
- includes = cls._build_filter(*include)
- excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude)
- out = filter(includes, out)
- out = filterfalse(excludes, out)
- return list(out)
-
- @staticmethod
- def require_parents(packages):
- """
- Exclude any apparent package that apparently doesn't include its
- parent.
-
- For example, exclude 'foo.bar' if 'foo' is not present.
- """
- found = []
- for pkg in packages:
- base, sep, child = pkg.rpartition('.')
- if base and base not in found:
- continue
- found.append(pkg)
- yield pkg
-
- @staticmethod
- def _candidate_dirs(base_path):
- """
- Return all dirs in base_path that might be packages.
- """
- has_dot = lambda name: '.' in name
- for root, dirs, files in os.walk(base_path, followlinks=True):
- # Exclude directories that contain a period, as they cannot be
- # packages. Mutate the list to avoid traversal.
- dirs[:] = filterfalse(has_dot, dirs)
- for dir in dirs:
- yield os.path.relpath(os.path.join(root, dir), base_path)
-
- @classmethod
- def _find_packages_iter(cls, base_path):
- candidates = cls._candidate_dirs(base_path)
- return (
- path.replace(os.path.sep, '.')
- for path in candidates
- if cls._looks_like_package(os.path.join(base_path, path))
- )
-
- @staticmethod
- def _looks_like_package(path):
- return os.path.isfile(os.path.join(path, '__init__.py'))
-
- @staticmethod
- def _build_filter(*patterns):
- """
- Given a list of patterns, return a callable that will be true only if
- the input matches one of the patterns.
- """
- return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
-
-class PEP420PackageFinder(PackageFinder):
- @staticmethod
- def _looks_like_package(path):
- return True
-
-find_packages = PackageFinder.find
-
-setup = distutils.core.setup
-
-_Command = _get_unpatched(_Command)
-
-class Command(_Command):
- __doc__ = _Command.__doc__
-
- command_consumes_arguments = False
-
- def __init__(self, dist, **kw):
- """
- Construct the command for dist, updating
- vars(self) with any keyword parameters.
- """
- _Command.__init__(self, dist)
- vars(self).update(kw)
-
- def reinitialize_command(self, command, reinit_subcommands=0, **kw):
- cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
- vars(cmd).update(kw)
- return cmd
-
-# we can't patch distutils.cmd, alas
-distutils.core.Command = Command
-
-
-def _find_all_simple(path):
- """
- Find all files under 'path'
- """
- results = (
- os.path.join(base, file)
- for base, dirs, files in os.walk(path, followlinks=True)
- for file in files
- )
- return filter(os.path.isfile, results)
-
-
-def findall(dir=os.curdir):
- """
- Find all files under 'dir' and return the list of full filenames.
- Unless dir is '.', return full filenames with dir prepended.
- """
- files = _find_all_simple(dir)
- if dir == os.curdir:
- make_rel = functools.partial(os.path.relpath, start=dir)
- files = map(make_rel, files)
- return list(files)
-
-
-# fix findall bug in distutils (http://bugs.python.org/issue12885)
-distutils.filelist.findall = findall
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
deleted file mode 100755
index b3c9fa56..00000000
--- a/setuptools/archive_util.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""Utilities for extracting common archive formats"""
-
-
-__all__ = [
- "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
- "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
-]
-
-import zipfile
-import tarfile
-import os
-import shutil
-import posixpath
-import contextlib
-from pkg_resources import ensure_directory, ContextualZipFile
-from distutils.errors import DistutilsError
-
-class UnrecognizedFormat(DistutilsError):
- """Couldn't recognize the archive type"""
-
-def default_filter(src,dst):
- """The default progress/filter callback; returns True for all files"""
- return dst
-
-
-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
- drivers=None):
- """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
-
- `progress_filter` is a function taking two arguments: a source path
- internal to the archive ('/'-separated), and a filesystem path where it
- will be extracted. The callback must return the desired extract path
- (which may be the same as the one passed in), or else ``None`` to skip
- that file or directory. The callback can thus be used to report on the
- progress of the extraction, as well as to filter the items extracted or
- alter their extraction paths.
-
- `drivers`, if supplied, must be a non-empty sequence of functions with the
- same signature as this function (minus the `drivers` argument), that raise
- ``UnrecognizedFormat`` if they do not support extracting the designated
- archive type. The `drivers` are tried in sequence until one is found that
- does not raise an error, or until all are exhausted (in which case
- ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
- drivers, the module's ``extraction_drivers`` constant will be used, which
- means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
- order.
- """
- for driver in drivers or extraction_drivers:
- try:
- driver(filename, extract_dir, progress_filter)
- except UnrecognizedFormat:
- continue
- else:
- return
- else:
- raise UnrecognizedFormat(
- "Not a recognized archive type: %s" % filename
- )
-
-
-def unpack_directory(filename, extract_dir, progress_filter=default_filter):
- """"Unpack" a directory, using the same interface as for archives
-
- Raises ``UnrecognizedFormat`` if `filename` is not a directory
- """
- if not os.path.isdir(filename):
- raise UnrecognizedFormat("%s is not a directory" % filename)
-
- paths = {
- filename: ('', extract_dir),
- }
- for base, dirs, files in os.walk(filename):
- src, dst = paths[base]
- for d in dirs:
- paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
- for f in files:
- target = os.path.join(dst, f)
- target = progress_filter(src + f, target)
- if not target:
- # skip non-files
- continue
- ensure_directory(target)
- f = os.path.join(base, f)
- shutil.copyfile(f, target)
- shutil.copystat(f, target)
-
-
-def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack zip `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
- by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
-
- if not zipfile.is_zipfile(filename):
- raise UnrecognizedFormat("%s is not a zip file" % (filename,))
-
- with ContextualZipFile(filename) as z:
- for info in z.infolist():
- name = info.filename
-
- # don't extract absolute paths or ones with .. in them
- if name.startswith('/') or '..' in name.split('/'):
- continue
-
- target = os.path.join(extract_dir, *name.split('/'))
- target = progress_filter(name, target)
- if not target:
- continue
- if name.endswith('/'):
- # directory
- ensure_directory(target)
- else:
- # file
- ensure_directory(target)
- data = z.read(info.filename)
- with open(target, 'wb') as f:
- f.write(data)
- unix_attributes = info.external_attr >> 16
- if unix_attributes:
- os.chmod(target, unix_attributes)
-
-
-def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
- """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
-
- Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
- by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
- of the `progress_filter` argument.
- """
- try:
- tarobj = tarfile.open(filename)
- except tarfile.TarError:
- raise UnrecognizedFormat(
- "%s is not a compressed or uncompressed tar file" % (filename,)
- )
- with contextlib.closing(tarobj):
- # don't do any chowning!
- tarobj.chown = lambda *args: None
- for member in tarobj:
- name = member.name
- # don't extract absolute paths or ones with .. in them
- if not name.startswith('/') and '..' not in name.split('/'):
- prelim_dst = os.path.join(extract_dir, *name.split('/'))
-
- # resolve any links and to extract the link targets as normal
- # files
- while member is not None and (member.islnk() or member.issym()):
- linkpath = member.linkname
- if member.issym():
- base = posixpath.dirname(member.name)
- linkpath = posixpath.join(base, linkpath)
- linkpath = posixpath.normpath(linkpath)
- member = tarobj._getmember(linkpath)
-
- if member is not None and (member.isfile() or member.isdir()):
- final_dst = progress_filter(name, prelim_dst)
- if final_dst:
- if final_dst.endswith(os.sep):
- final_dst = final_dst[:-1]
- try:
- # XXX Ugh
- tarobj._extract_member(member, final_dst)
- except tarfile.ExtractError:
- # chown/chmod/mkfifo/mknode/makedev failed
- pass
- return True
-
-extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/setuptools/cli-32.exe b/setuptools/cli-32.exe
deleted file mode 100644
index b1487b78..00000000
--- a/setuptools/cli-32.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/cli-64.exe b/setuptools/cli-64.exe
deleted file mode 100644
index 675e6bf3..00000000
--- a/setuptools/cli-64.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/cli-arm-32.exe b/setuptools/cli-arm-32.exe
deleted file mode 100644
index 2f40402d..00000000
--- a/setuptools/cli-arm-32.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/cli.exe b/setuptools/cli.exe
deleted file mode 100644
index b1487b78..00000000
--- a/setuptools/cli.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py
deleted file mode 100644
index 3fb2f6df..00000000
--- a/setuptools/command/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-__all__ = [
- 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
- 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
- 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
- 'register', 'bdist_wininst', 'upload_docs', 'upload',
-]
-
-from distutils.command.bdist import bdist
-import sys
-
-from setuptools.command import install_scripts
-
-
-if 'egg' not in bdist.format_commands:
- bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
- bdist.format_commands.append('egg')
-
-del bdist, sys
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
deleted file mode 100755
index 4532b1cc..00000000
--- a/setuptools/command/alias.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from distutils.errors import DistutilsOptionError
-
-from setuptools.extern.six.moves import map
-
-from setuptools.command.setopt import edit_config, option_base, config_file
-
-
-def shquote(arg):
- """Quote an argument for later parsing by shlex.split()"""
- for c in '"', "'", "\\", "#":
- if c in arg:
- return repr(arg)
- if arg.split() != [arg]:
- return repr(arg)
- return arg
-
-
-class alias(option_base):
- """Define a shortcut that invokes one or more commands"""
-
- description = "define a shortcut to invoke one or more commands"
- command_consumes_arguments = True
-
- user_options = [
- ('remove', 'r', 'remove (unset) the alias'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.args = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.remove and len(self.args) != 1:
- raise DistutilsOptionError(
- "Must specify exactly one argument (the alias name) when "
- "using --remove"
- )
-
- def run(self):
- aliases = self.distribution.get_option_dict('aliases')
-
- if not self.args:
- print("Command Aliases")
- print("---------------")
- for alias in aliases:
- print("setup.py alias", format_alias(alias, aliases))
- return
-
- elif len(self.args) == 1:
- alias, = self.args
- if self.remove:
- command = None
- elif alias in aliases:
- print("setup.py alias", format_alias(alias, aliases))
- return
- else:
- print("No alias definition found for %r" % alias)
- return
- else:
- alias = self.args[0]
- command = ' '.join(map(shquote, self.args[1:]))
-
- edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
-
-
-def format_alias(name, aliases):
- source, command = aliases[name]
- if source == config_file('global'):
- source = '--global-config '
- elif source == config_file('user'):
- source = '--user-config '
- elif source == config_file('local'):
- source = ''
- else:
- source = '--filename=%r' % source
- return source + name + ' ' + command
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
deleted file mode 100644
index 9cebd7fa..00000000
--- a/setuptools/command/bdist_egg.py
+++ /dev/null
@@ -1,471 +0,0 @@
-"""setuptools.command.bdist_egg
-
-Build .egg distributions"""
-
-from distutils.errors import DistutilsSetupError
-from distutils.dir_util import remove_tree, mkpath
-from distutils import log
-from types import CodeType
-import sys
-import os
-import marshal
-import textwrap
-
-from setuptools.extern import six
-
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
-from setuptools.extension import Library
-from setuptools import Command
-
-try:
- # Python 2.7 or >=3.2
- from sysconfig import get_path, get_python_version
-
- def _get_purelib():
- return get_path("purelib")
-except ImportError:
- from distutils.sysconfig import get_python_lib, get_python_version
-
- def _get_purelib():
- return get_python_lib(False)
-
-
-def strip_module(filename):
- if '.' in filename:
- filename = os.path.splitext(filename)[0]
- if filename.endswith('module'):
- filename = filename[:-6]
- return filename
-
-
-def write_stub(resource, pyfile):
- _stub_template = textwrap.dedent("""
- def __bootstrap__():
- global __bootstrap__, __loader__, __file__
- import sys, pkg_resources, imp
- __file__ = pkg_resources.resource_filename(__name__, %r)
- __loader__ = None; del __bootstrap__, __loader__
- imp.load_dynamic(__name__,__file__)
- __bootstrap__()
- """).lstrip()
- with open(pyfile, 'w') as f:
- f.write(_stub_template % resource)
-
-
-class bdist_egg(Command):
- description = "create an \"egg\" distribution"
-
- user_options = [
- ('bdist-dir=', 'b',
- "temporary directory for creating the distribution"),
- ('plat-name=', 'p', "platform name to embed in generated filenames "
- "(default: %s)" % get_build_platform()),
- ('exclude-source-files', None,
- "remove all .py files from the generated egg"),
- ('keep-temp', 'k',
- "keep the pseudo-installation tree around after " +
- "creating the distribution archive"),
- ('dist-dir=', 'd',
- "directory to put final built distributions in"),
- ('skip-build', None,
- "skip rebuilding everything (for testing/debugging)"),
- ]
-
- boolean_options = [
- 'keep-temp', 'skip-build', 'exclude-source-files'
- ]
-
- def initialize_options(self):
- self.bdist_dir = None
- self.plat_name = None
- self.keep_temp = 0
- self.dist_dir = None
- self.skip_build = 0
- self.egg_output = None
- self.exclude_source_files = None
-
- def finalize_options(self):
- ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
- self.egg_info = ei_cmd.egg_info
-
- if self.bdist_dir is None:
- bdist_base = self.get_finalized_command('bdist').bdist_base
- self.bdist_dir = os.path.join(bdist_base, 'egg')
-
- if self.plat_name is None:
- self.plat_name = get_build_platform()
-
- self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
-
- if self.egg_output is None:
-
- # Compute filename of the output egg
- basename = Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version,
- get_python_version(),
- self.distribution.has_ext_modules() and self.plat_name
- ).egg_name()
-
- self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
-
- def do_install_data(self):
- # Hack for packages that install data to install's --install-lib
- self.get_finalized_command('install').install_lib = self.bdist_dir
-
- site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
- old, self.distribution.data_files = self.distribution.data_files, []
-
- for item in old:
- if isinstance(item, tuple) and len(item) == 2:
- if os.path.isabs(item[0]):
- realpath = os.path.realpath(item[0])
- normalized = os.path.normcase(realpath)
- if normalized == site_packages or normalized.startswith(
- site_packages + os.sep
- ):
- item = realpath[len(site_packages) + 1:], item[1]
- # XXX else: raise ???
- self.distribution.data_files.append(item)
-
- try:
- log.info("installing package data to %s" % self.bdist_dir)
- self.call_command('install_data', force=0, root=None)
- finally:
- self.distribution.data_files = old
-
- def get_outputs(self):
- return [self.egg_output]
-
- def call_command(self, cmdname, **kw):
- """Invoke reinitialized command `cmdname` with keyword args"""
- for dirname in INSTALL_DIRECTORY_ATTRS:
- kw.setdefault(dirname, self.bdist_dir)
- kw.setdefault('skip_build', self.skip_build)
- kw.setdefault('dry_run', self.dry_run)
- cmd = self.reinitialize_command(cmdname, **kw)
- self.run_command(cmdname)
- return cmd
-
- def run(self):
- # Generate metadata first
- self.run_command("egg_info")
- # We run install_lib before install_data, because some data hacks
- # pull their data path from the install_lib command.
- log.info("installing library code to %s" % self.bdist_dir)
- instcmd = self.get_finalized_command('install')
- old_root = instcmd.root
- instcmd.root = None
- if self.distribution.has_c_libraries() and not self.skip_build:
- self.run_command('build_clib')
- cmd = self.call_command('install_lib', warn_dir=0)
- instcmd.root = old_root
-
- all_outputs, ext_outputs = self.get_ext_outputs()
- self.stubs = []
- to_compile = []
- for (p, ext_name) in enumerate(ext_outputs):
- filename, ext = os.path.splitext(ext_name)
- pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
- '.py')
- self.stubs.append(pyfile)
- log.info("creating stub loader for %s" % ext_name)
- if not self.dry_run:
- write_stub(os.path.basename(ext_name), pyfile)
- to_compile.append(pyfile)
- ext_outputs[p] = ext_name.replace(os.sep, '/')
-
- if to_compile:
- cmd.byte_compile(to_compile)
- if self.distribution.data_files:
- self.do_install_data()
-
- # Make the EGG-INFO directory
- archive_root = self.bdist_dir
- egg_info = os.path.join(archive_root, 'EGG-INFO')
- self.mkpath(egg_info)
- if self.distribution.scripts:
- script_dir = os.path.join(egg_info, 'scripts')
- log.info("installing scripts to %s" % script_dir)
- self.call_command('install_scripts', install_dir=script_dir,
- no_ep=1)
-
- self.copy_metadata_to(egg_info)
- native_libs = os.path.join(egg_info, "native_libs.txt")
- if all_outputs:
- log.info("writing %s" % native_libs)
- if not self.dry_run:
- ensure_directory(native_libs)
- libs_file = open(native_libs, 'wt')
- libs_file.write('\n'.join(all_outputs))
- libs_file.write('\n')
- libs_file.close()
- elif os.path.isfile(native_libs):
- log.info("removing %s" % native_libs)
- if not self.dry_run:
- os.unlink(native_libs)
-
- write_safety_flag(
- os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
- )
-
- if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
- log.warn(
- "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
- if self.exclude_source_files:
- self.zap_pyfiles()
-
- # Make the archive
- make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
- dry_run=self.dry_run, mode=self.gen_header())
- if not self.keep_temp:
- remove_tree(self.bdist_dir, dry_run=self.dry_run)
-
- # Add to 'Distribution.dist_files' so that the "upload" command works
- getattr(self.distribution, 'dist_files', []).append(
- ('bdist_egg', get_python_version(), self.egg_output))
-
- def zap_pyfiles(self):
- log.info("Removing .py files from temporary directory")
- for base, dirs, files in walk_egg(self.bdist_dir):
- for name in files:
- if name.endswith('.py'):
- path = os.path.join(base, name)
- log.debug("Deleting %s", path)
- os.unlink(path)
-
- def zip_safe(self):
- safe = getattr(self.distribution, 'zip_safe', None)
- if safe is not None:
- return safe
- log.warn("zip_safe flag not set; analyzing archive contents...")
- return analyze_egg(self.bdist_dir, self.stubs)
-
- def gen_header(self):
- epm = EntryPoint.parse_map(self.distribution.entry_points or '')
- ep = epm.get('setuptools.installation', {}).get('eggsecutable')
- if ep is None:
- return 'w' # not an eggsecutable, do it the usual way.
-
- if not ep.attrs or ep.extras:
- raise DistutilsSetupError(
- "eggsecutable entry point (%r) cannot have 'extras' "
- "or refer to a module" % (ep,)
- )
-
- pyver = sys.version[:3]
- pkg = ep.module_name
- full = '.'.join(ep.attrs)
- base = ep.attrs[0]
- basename = os.path.basename(self.egg_output)
-
- header = (
- "#!/bin/sh\n"
- 'if [ `basename $0` = "%(basename)s" ]\n'
- 'then exec python%(pyver)s -c "'
- "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
- "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
- '" "$@"\n'
- 'else\n'
- ' echo $0 is not the correct name for this egg file.\n'
- ' echo Please rename it back to %(basename)s and try again.\n'
- ' exec false\n'
- 'fi\n'
- ) % locals()
-
- if not self.dry_run:
- mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
- f = open(self.egg_output, 'w')
- f.write(header)
- f.close()
- return 'a'
-
- def copy_metadata_to(self, target_dir):
- "Copy metadata (egg info) to the target_dir"
- # normalize the path (so that a forward-slash in egg_info will
- # match using startswith below)
- norm_egg_info = os.path.normpath(self.egg_info)
- prefix = os.path.join(norm_egg_info, '')
- for path in self.ei_cmd.filelist.files:
- if path.startswith(prefix):
- target = os.path.join(target_dir, path[len(prefix):])
- ensure_directory(target)
- self.copy_file(path, target)
-
- def get_ext_outputs(self):
- """Get a list of relative paths to C extensions in the output distro"""
-
- all_outputs = []
- ext_outputs = []
-
- paths = {self.bdist_dir: ''}
- for base, dirs, files in os.walk(self.bdist_dir):
- for filename in files:
- if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
- all_outputs.append(paths[base] + filename)
- for filename in dirs:
- paths[os.path.join(base, filename)] = (paths[base] +
- filename + '/')
-
- if self.distribution.has_ext_modules():
- build_cmd = self.get_finalized_command('build_ext')
- for ext in build_cmd.extensions:
- if isinstance(ext, Library):
- continue
- fullname = build_cmd.get_ext_fullname(ext.name)
- filename = build_cmd.get_ext_filename(fullname)
- if not os.path.basename(filename).startswith('dl-'):
- if os.path.exists(os.path.join(self.bdist_dir, filename)):
- ext_outputs.append(filename)
-
- return all_outputs, ext_outputs
-
-
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
-
-
-def walk_egg(egg_dir):
- """Walk an unpacked egg's contents, skipping the metadata directory"""
- walker = os.walk(egg_dir)
- base, dirs, files = next(walker)
- if 'EGG-INFO' in dirs:
- dirs.remove('EGG-INFO')
- yield base, dirs, files
- for bdf in walker:
- yield bdf
-
-
-def analyze_egg(egg_dir, stubs):
- # check for existing flag in EGG-INFO
- for flag, fn in safety_flags.items():
- if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
- return flag
- if not can_scan():
- return False
- safe = True
- for base, dirs, files in walk_egg(egg_dir):
- for name in files:
- if name.endswith('.py') or name.endswith('.pyw'):
- continue
- elif name.endswith('.pyc') or name.endswith('.pyo'):
- # always scan, even if we already know we're not safe
- safe = scan_module(egg_dir, base, name, stubs) and safe
- return safe
-
-
-def write_safety_flag(egg_dir, safe):
- # Write or remove zip safety flag file(s)
- for flag, fn in safety_flags.items():
- fn = os.path.join(egg_dir, fn)
- if os.path.exists(fn):
- if safe is None or bool(safe) != flag:
- os.unlink(fn)
- elif safe is not None and bool(safe) == flag:
- f = open(fn, 'wt')
- f.write('\n')
- f.close()
-
-
-safety_flags = {
- True: 'zip-safe',
- False: 'not-zip-safe',
-}
-
-
-def scan_module(egg_dir, base, name, stubs):
- """Check whether module possibly uses unsafe-for-zipfile stuff"""
-
- filename = os.path.join(base, name)
- if filename[:-1] in stubs:
- return True # Extension module
- pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
- module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
- if sys.version_info < (3, 3):
- skip = 8 # skip magic & date
- else:
- skip = 12 # skip magic & date & file size
- f = open(filename, 'rb')
- f.read(skip)
- code = marshal.load(f)
- f.close()
- safe = True
- symbols = dict.fromkeys(iter_symbols(code))
- for bad in ['__file__', '__path__']:
- if bad in symbols:
- log.warn("%s: module references %s", module, bad)
- safe = False
- if 'inspect' in symbols:
- for bad in [
- 'getsource', 'getabsfile', 'getsourcefile', 'getfile'
- 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
- 'getinnerframes', 'getouterframes', 'stack', 'trace'
- ]:
- if bad in symbols:
- log.warn("%s: module MAY be using inspect.%s", module, bad)
- safe = False
- return safe
-
-
-def iter_symbols(code):
- """Yield names and strings used by `code` and its nested code objects"""
- for name in code.co_names:
- yield name
- for const in code.co_consts:
- if isinstance(const, six.string_types):
- yield const
- elif isinstance(const, CodeType):
- for name in iter_symbols(const):
- yield name
-
-
-def can_scan():
- if not sys.platform.startswith('java') and sys.platform != 'cli':
- # CPython, PyPy, etc.
- return True
- log.warn("Unable to analyze compiled code on this platform.")
- log.warn("Please ask the author to include a 'zip_safe'"
- " setting (either True or False) in the package's setup.py")
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
- 'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
- mode='w'):
- """Create a zip file from all the files under 'base_dir'. The output
- zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
- Python module (if available) or the InfoZIP "zip" utility (if installed
- and found on the default search path). If neither tool is available,
- raises DistutilsExecError. Returns the name of the output zip file.
- """
- import zipfile
-
- mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
- log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
-
- def visit(z, dirname, names):
- for name in names:
- path = os.path.normpath(os.path.join(dirname, name))
- if os.path.isfile(path):
- p = path[len(base_dir) + 1:]
- if not dry_run:
- z.write(path, p)
- log.debug("adding '%s'" % p)
-
- compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
- if not dry_run:
- z = zipfile.ZipFile(zip_filename, mode, compression=compression)
- for dirname, dirs, files in os.walk(base_dir):
- visit(z, dirname, files)
- z.close()
- else:
- for dirname, dirs, files in os.walk(base_dir):
- visit(None, dirname, files)
- return zip_filename
diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py
deleted file mode 100755
index 70730927..00000000
--- a/setuptools/command/bdist_rpm.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import distutils.command.bdist_rpm as orig
-
-
-class bdist_rpm(orig.bdist_rpm):
- """
- Override the default bdist_rpm behavior to do the following:
-
- 1. Run egg_info to ensure the name and version are properly calculated.
- 2. Always run 'install' using --single-version-externally-managed to
- disable eggs in RPM distributions.
- 3. Replace dash with underscore in the version numbers for better RPM
- compatibility.
- """
-
- def run(self):
- # ensure distro name is up-to-date
- self.run_command('egg_info')
-
- orig.bdist_rpm.run(self)
-
- def _make_spec_file(self):
- version = self.distribution.get_version()
- rpmversion = version.replace('-', '_')
- spec = orig.bdist_rpm._make_spec_file(self)
- line23 = '%define version ' + version
- line24 = '%define version ' + rpmversion
- spec = [
- line.replace(
- "Source0: %{name}-%{version}.tar",
- "Source0: %{name}-%{unmangled_version}.tar"
- ).replace(
- "setup.py install ",
- "setup.py install --single-version-externally-managed "
- ).replace(
- "%setup",
- "%setup -n %{name}-%{unmangled_version}"
- ).replace(line23, line24)
- for line in spec
- ]
- insert_loc = spec.index(line24) + 1
- unmangled_version = "%define unmangled_version " + version
- spec.insert(insert_loc, unmangled_version)
- return spec
diff --git a/setuptools/command/bdist_wininst.py b/setuptools/command/bdist_wininst.py
deleted file mode 100755
index 073de97b..00000000
--- a/setuptools/command/bdist_wininst.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import distutils.command.bdist_wininst as orig
-
-
-class bdist_wininst(orig.bdist_wininst):
- def reinitialize_command(self, command, reinit_subcommands=0):
- """
- Supplement reinitialize_command to work around
- http://bugs.python.org/issue20819
- """
- cmd = self.distribution.reinitialize_command(
- command, reinit_subcommands)
- if command in ('install', 'install_lib'):
- cmd.install_lib = None
- return cmd
-
- def run(self):
- self._is_running = True
- try:
- orig.bdist_wininst.run(self)
- finally:
- self._is_running = False
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
deleted file mode 100644
index 92e4a189..00000000
--- a/setuptools/command/build_ext.py
+++ /dev/null
@@ -1,296 +0,0 @@
-from distutils.command.build_ext import build_ext as _du_build_ext
-from distutils.file_util import copy_file
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler
-from distutils.errors import DistutilsError
-from distutils import log
-import os
-import sys
-import itertools
-
-from setuptools.extension import Library
-
-try:
- # Attempt to use Cython for building extensions, if available
- from Cython.Distutils.build_ext import build_ext as _build_ext
-except ImportError:
- _build_ext = _du_build_ext
-
-try:
- # Python 2.7 or >=3.2
- from sysconfig import _CONFIG_VARS
-except ImportError:
- from distutils.sysconfig import get_config_var
-
- get_config_var("LDSHARED") # make sure _config_vars is initialized
- del get_config_var
- from distutils.sysconfig import _config_vars as _CONFIG_VARS
-
-have_rtld = False
-use_stubs = False
-libtype = 'shared'
-
-if sys.platform == "darwin":
- use_stubs = True
-elif os.name != 'nt':
- try:
- import dl
- use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
- except ImportError:
- pass
-
-
-if_dl = lambda s: s if have_rtld else ''
-
-class build_ext(_build_ext):
- def run(self):
- """Build extensions in build directory, then copy if --inplace"""
- old_inplace, self.inplace = self.inplace, 0
- _build_ext.run(self)
- self.inplace = old_inplace
- if old_inplace:
- self.copy_extensions_to_source()
-
- def copy_extensions_to_source(self):
- build_py = self.get_finalized_command('build_py')
- for ext in self.extensions:
- fullname = self.get_ext_fullname(ext.name)
- filename = self.get_ext_filename(fullname)
- modpath = fullname.split('.')
- package = '.'.join(modpath[:-1])
- package_dir = build_py.get_package_dir(package)
- dest_filename = os.path.join(package_dir,
- os.path.basename(filename))
- src_filename = os.path.join(self.build_lib, filename)
-
- # Always copy, even if source is older than destination, to ensure
- # that the right extensions for the current Python/platform are
- # used.
- copy_file(
- src_filename, dest_filename, verbose=self.verbose,
- dry_run=self.dry_run
- )
- if ext._needs_stub:
- self.write_stub(package_dir or os.curdir, ext, True)
-
- def get_ext_filename(self, fullname):
- filename = _build_ext.get_ext_filename(self, fullname)
- if fullname in self.ext_map:
- ext = self.ext_map[fullname]
- if isinstance(ext, Library):
- fn, ext = os.path.splitext(filename)
- return self.shlib_compiler.library_filename(fn, libtype)
- elif use_stubs and ext._links_to_dynamic:
- d, fn = os.path.split(filename)
- return os.path.join(d, 'dl-' + fn)
- return filename
-
- def initialize_options(self):
- _build_ext.initialize_options(self)
- self.shlib_compiler = None
- self.shlibs = []
- self.ext_map = {}
-
- def finalize_options(self):
- _build_ext.finalize_options(self)
- self.extensions = self.extensions or []
- self.check_extensions_list(self.extensions)
- self.shlibs = [ext for ext in self.extensions
- if isinstance(ext, Library)]
- if self.shlibs:
- self.setup_shlib_compiler()
- for ext in self.extensions:
- ext._full_name = self.get_ext_fullname(ext.name)
- for ext in self.extensions:
- fullname = ext._full_name
- self.ext_map[fullname] = ext
-
- # distutils 3.1 will also ask for module names
- # XXX what to do with conflicts?
- self.ext_map[fullname.split('.')[-1]] = ext
-
- ltd = self.shlibs and self.links_to_dynamic(ext) or False
- ns = ltd and use_stubs and not isinstance(ext, Library)
- ext._links_to_dynamic = ltd
- ext._needs_stub = ns
- filename = ext._file_name = self.get_ext_filename(fullname)
- libdir = os.path.dirname(os.path.join(self.build_lib, filename))
- if ltd and libdir not in ext.library_dirs:
- ext.library_dirs.append(libdir)
- if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
- ext.runtime_library_dirs.append(os.curdir)
-
- def setup_shlib_compiler(self):
- compiler = self.shlib_compiler = new_compiler(
- compiler=self.compiler, dry_run=self.dry_run, force=self.force
- )
- if sys.platform == "darwin":
- tmp = _CONFIG_VARS.copy()
- try:
- # XXX Help! I don't have any idea whether these are right...
- _CONFIG_VARS['LDSHARED'] = (
- "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
- _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
- _CONFIG_VARS['SO'] = ".dylib"
- customize_compiler(compiler)
- finally:
- _CONFIG_VARS.clear()
- _CONFIG_VARS.update(tmp)
- else:
- customize_compiler(compiler)
-
- if self.include_dirs is not None:
- compiler.set_include_dirs(self.include_dirs)
- if self.define is not None:
- # 'define' option is a list of (name,value) tuples
- for (name, value) in self.define:
- compiler.define_macro(name, value)
- if self.undef is not None:
- for macro in self.undef:
- compiler.undefine_macro(macro)
- if self.libraries is not None:
- compiler.set_libraries(self.libraries)
- if self.library_dirs is not None:
- compiler.set_library_dirs(self.library_dirs)
- if self.rpath is not None:
- compiler.set_runtime_library_dirs(self.rpath)
- if self.link_objects is not None:
- compiler.set_link_objects(self.link_objects)
-
- # hack so distutils' build_extension() builds a library instead
- compiler.link_shared_object = link_shared_object.__get__(compiler)
-
- def get_export_symbols(self, ext):
- if isinstance(ext, Library):
- return ext.export_symbols
- return _build_ext.get_export_symbols(self, ext)
-
- def build_extension(self, ext):
- ext._convert_pyx_sources_to_lang()
- _compiler = self.compiler
- try:
- if isinstance(ext, Library):
- self.compiler = self.shlib_compiler
- _build_ext.build_extension(self, ext)
- if ext._needs_stub:
- cmd = self.get_finalized_command('build_py').build_lib
- self.write_stub(cmd, ext)
- finally:
- self.compiler = _compiler
-
- def links_to_dynamic(self, ext):
- """Return true if 'ext' links to a dynamic lib in the same package"""
- # XXX this should check to ensure the lib is actually being built
- # XXX as dynamic, and not just using a locally-found version or a
- # XXX static-compiled version
- libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
- pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
- return any(pkg + libname in libnames for libname in ext.libraries)
-
- def get_outputs(self):
- return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
-
- def __get_stubs_outputs(self):
- # assemble the base name for each extension that needs a stub
- ns_ext_bases = (
- os.path.join(self.build_lib, *ext._full_name.split('.'))
- for ext in self.extensions
- if ext._needs_stub
- )
- # pair each base with the extension
- pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
- return list(base + fnext for base, fnext in pairs)
-
- def __get_output_extensions(self):
- yield '.py'
- yield '.pyc'
- if self.get_finalized_command('build_py').optimize:
- yield '.pyo'
-
- def write_stub(self, output_dir, ext, compile=False):
- log.info("writing stub loader for %s to %s", ext._full_name,
- output_dir)
- stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
- '.py')
- if compile and os.path.exists(stub_file):
- raise DistutilsError(stub_file + " already exists! Please delete.")
- if not self.dry_run:
- f = open(stub_file, 'w')
- f.write(
- '\n'.join([
- "def __bootstrap__():",
- " global __bootstrap__, __file__, __loader__",
- " import sys, os, pkg_resources, imp" + if_dl(", dl"),
- " __file__ = pkg_resources.resource_filename"
- "(__name__,%r)"
- % os.path.basename(ext._file_name),
- " del __bootstrap__",
- " if '__loader__' in globals():",
- " del __loader__",
- if_dl(" old_flags = sys.getdlopenflags()"),
- " old_dir = os.getcwd()",
- " try:",
- " os.chdir(os.path.dirname(__file__))",
- if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
- " imp.load_dynamic(__name__,__file__)",
- " finally:",
- if_dl(" sys.setdlopenflags(old_flags)"),
- " os.chdir(old_dir)",
- "__bootstrap__()",
- "" # terminal \n
- ])
- )
- f.close()
- if compile:
- from distutils.util import byte_compile
-
- byte_compile([stub_file], optimize=0,
- force=True, dry_run=self.dry_run)
- optimize = self.get_finalized_command('install_lib').optimize
- if optimize > 0:
- byte_compile([stub_file], optimize=optimize,
- force=True, dry_run=self.dry_run)
- if os.path.exists(stub_file) and not self.dry_run:
- os.unlink(stub_file)
-
-
-if use_stubs or os.name == 'nt':
- # Build shared libraries
- #
- def link_shared_object(
- self, objects, output_libname, output_dir=None, libraries=None,
- library_dirs=None, runtime_library_dirs=None, export_symbols=None,
- debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
- target_lang=None):
- self.link(
- self.SHARED_LIBRARY, objects, output_libname,
- output_dir, libraries, library_dirs, runtime_library_dirs,
- export_symbols, debug, extra_preargs, extra_postargs,
- build_temp, target_lang
- )
-else:
- # Build static libraries everywhere else
- libtype = 'static'
-
- def link_shared_object(
- self, objects, output_libname, output_dir=None, libraries=None,
- library_dirs=None, runtime_library_dirs=None, export_symbols=None,
- debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
- target_lang=None):
- # XXX we need to either disallow these attrs on Library instances,
- # or warn/abort here if set, or something...
- # libraries=None, library_dirs=None, runtime_library_dirs=None,
- # export_symbols=None, extra_preargs=None, extra_postargs=None,
- # build_temp=None
-
- assert output_dir is None # distutils build_ext doesn't pass this
- output_dir, filename = os.path.split(output_libname)
- basename, ext = os.path.splitext(filename)
- if self.library_filename("x").startswith('lib'):
- # strip 'lib' prefix; this is kludgy if some platform uses
- # a different prefix
- basename = basename[3:]
-
- self.create_static_lib(
- objects, basename, output_dir, debug, target_lang
- )
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
deleted file mode 100644
index 8623c777..00000000
--- a/setuptools/command/build_py.py
+++ /dev/null
@@ -1,222 +0,0 @@
-from glob import glob
-from distutils.util import convert_path
-import distutils.command.build_py as orig
-import os
-import fnmatch
-import textwrap
-import io
-import distutils.errors
-import collections
-import itertools
-
-from setuptools.extern.six.moves import map
-
-try:
- from setuptools.lib2to3_ex import Mixin2to3
-except ImportError:
- class Mixin2to3:
- def run_2to3(self, files, doctests=True):
- "do nothing"
-
-
-class build_py(orig.build_py, Mixin2to3):
- """Enhanced 'build_py' command that includes data files with packages
-
- The data files are specified via a 'package_data' argument to 'setup()'.
- See 'setuptools.dist.Distribution' for more details.
-
- Also, this version of the 'build_py' command allows you to specify both
- 'py_modules' and 'packages' in the same setup operation.
- """
-
- def finalize_options(self):
- orig.build_py.finalize_options(self)
- self.package_data = self.distribution.package_data
- self.exclude_package_data = (self.distribution.exclude_package_data or
- {})
- if 'data_files' in self.__dict__:
- del self.__dict__['data_files']
- self.__updated_files = []
- self.__doctests_2to3 = []
-
- def run(self):
- """Build modules, packages, and copy data files to build directory"""
- if not self.py_modules and not self.packages:
- return
-
- if self.py_modules:
- self.build_modules()
-
- if self.packages:
- self.build_packages()
- self.build_package_data()
-
- self.run_2to3(self.__updated_files, False)
- self.run_2to3(self.__updated_files, True)
- self.run_2to3(self.__doctests_2to3, True)
-
- # Only compile actual .py files, using our base class' idea of what our
- # output files are.
- self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
-
- def __getattr__(self, attr):
- "lazily compute data files"
- if attr == 'data_files':
- self.data_files = self._get_data_files()
- return self.data_files
- return orig.build_py.__getattr__(self, attr)
-
- def build_module(self, module, module_file, package):
- outfile, copied = orig.build_py.build_module(self, module, module_file,
- package)
- if copied:
- self.__updated_files.append(outfile)
- return outfile, copied
-
- def _get_data_files(self):
- """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
- self.analyze_manifest()
- return list(map(self._get_pkg_data_files, self.packages or ()))
-
- def _get_pkg_data_files(self, package):
- # Locate package source directory
- src_dir = self.get_package_dir(package)
-
- # Compute package build directory
- build_dir = os.path.join(*([self.build_lib] + package.split('.')))
-
- # Strip directory from globbed filenames
- filenames = [
- os.path.relpath(file, src_dir)
- for file in self.find_data_files(package, src_dir)
- ]
- return package, src_dir, build_dir, filenames
-
- def find_data_files(self, package, src_dir):
- """Return filenames for package's data files in 'src_dir'"""
- globs = (self.package_data.get('', [])
- + self.package_data.get(package, []))
- files = self.manifest_files.get(package, [])[:]
- for pattern in globs:
- # Each pattern has to be converted to a platform-specific path
- files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
- return self.exclude_data_files(package, src_dir, files)
-
- def build_package_data(self):
- """Copy data files into build directory"""
- for package, src_dir, build_dir, filenames in self.data_files:
- for filename in filenames:
- target = os.path.join(build_dir, filename)
- self.mkpath(os.path.dirname(target))
- srcfile = os.path.join(src_dir, filename)
- outf, copied = self.copy_file(srcfile, target)
- srcfile = os.path.abspath(srcfile)
- if (copied and
- srcfile in self.distribution.convert_2to3_doctests):
- self.__doctests_2to3.append(outf)
-
- def analyze_manifest(self):
- self.manifest_files = mf = {}
- if not self.distribution.include_package_data:
- return
- src_dirs = {}
- for package in self.packages or ():
- # Locate package source directory
- src_dirs[assert_relative(self.get_package_dir(package))] = package
-
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- for path in ei_cmd.filelist.files:
- d, f = os.path.split(assert_relative(path))
- prev = None
- oldf = f
- while d and d != prev and d not in src_dirs:
- prev = d
- d, df = os.path.split(d)
- f = os.path.join(df, f)
- if d in src_dirs:
- if path.endswith('.py') and f == oldf:
- continue # it's a module, not data
- mf.setdefault(src_dirs[d], []).append(path)
-
- def get_data_files(self):
- pass # Lazily compute data files in _get_data_files() function.
-
- def check_package(self, package, package_dir):
- """Check namespace packages' __init__ for declare_namespace"""
- try:
- return self.packages_checked[package]
- except KeyError:
- pass
-
- init_py = orig.build_py.check_package(self, package, package_dir)
- self.packages_checked[package] = init_py
-
- if not init_py or not self.distribution.namespace_packages:
- return init_py
-
- for pkg in self.distribution.namespace_packages:
- if pkg == package or pkg.startswith(package + '.'):
- break
- else:
- return init_py
-
- with io.open(init_py, 'rb') as f:
- contents = f.read()
- if b'declare_namespace' not in contents:
- raise distutils.errors.DistutilsError(
- "Namespace package problem: %s is a namespace package, but "
- "its\n__init__.py does not call declare_namespace()! Please "
- 'fix it.\n(See the setuptools manual under '
- '"Namespace Packages" for details.)\n"' % (package,)
- )
- return init_py
-
- def initialize_options(self):
- self.packages_checked = {}
- orig.build_py.initialize_options(self)
-
- def get_package_dir(self, package):
- res = orig.build_py.get_package_dir(self, package)
- if self.distribution.src_root is not None:
- return os.path.join(self.distribution.src_root, res)
- return res
-
- def exclude_data_files(self, package, src_dir, files):
- """Filter filenames for package's data files in 'src_dir'"""
- globs = (
- self.exclude_package_data.get('', [])
- + self.exclude_package_data.get(package, [])
- )
- bad = set(
- item
- for pattern in globs
- for item in fnmatch.filter(
- files,
- os.path.join(src_dir, convert_path(pattern)),
- )
- )
- seen = collections.defaultdict(itertools.count)
- return [
- fn
- for fn in files
- if fn not in bad
- # ditch dupes
- and not next(seen[fn])
- ]
-
-
-def assert_relative(path):
- if not os.path.isabs(path):
- return path
- from distutils.errors import DistutilsSetupError
-
- msg = textwrap.dedent("""
- Error: setup script specifies an absolute path:
-
- %s
-
- setup() arguments must *always* be /-separated paths relative to the
- setup.py directory, *never* absolute paths.
- """).lstrip() % path
- raise DistutilsSetupError(msg)
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
deleted file mode 100755
index 11b5df10..00000000
--- a/setuptools/command/develop.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsError, DistutilsOptionError
-import os
-import glob
-import io
-
-from setuptools.extern import six
-
-from pkg_resources import Distribution, PathMetadata, normalize_path
-from setuptools.command.easy_install import easy_install
-import setuptools
-
-
-class develop(easy_install):
- """Set up package for development"""
-
- description = "install package in 'development mode'"
-
- user_options = easy_install.user_options + [
- ("uninstall", "u", "Uninstall this source package"),
- ("egg-path=", None, "Set the path to be used in the .egg-link file"),
- ]
-
- boolean_options = easy_install.boolean_options + ['uninstall']
-
- command_consumes_arguments = False # override base
-
- def run(self):
- if self.uninstall:
- self.multi_version = True
- self.uninstall_link()
- else:
- self.install_for_development()
- self.warn_deprecated_options()
-
- def initialize_options(self):
- self.uninstall = None
- self.egg_path = None
- easy_install.initialize_options(self)
- self.setup_path = None
- self.always_copy_from = '.' # always copy eggs installed in curdir
-
- def finalize_options(self):
- ei = self.get_finalized_command("egg_info")
- if ei.broken_egg_info:
- template = "Please rename %r to %r before using 'develop'"
- args = ei.egg_info, ei.broken_egg_info
- raise DistutilsError(template % args)
- self.args = [ei.egg_name]
-
- easy_install.finalize_options(self)
- self.expand_basedirs()
- self.expand_dirs()
- # pick up setup-dir .egg files only: no .egg-info
- self.package_index.scan(glob.glob('*.egg'))
-
- egg_link_fn = ei.egg_name + '.egg-link'
- self.egg_link = os.path.join(self.install_dir, egg_link_fn)
- self.egg_base = ei.egg_base
- if self.egg_path is None:
- self.egg_path = os.path.abspath(ei.egg_base)
-
- target = normalize_path(self.egg_base)
- egg_path = normalize_path(os.path.join(self.install_dir,
- self.egg_path))
- if egg_path != target:
- raise DistutilsOptionError(
- "--egg-path must be a relative path from the install"
- " directory to " + target
- )
-
- # Make a distribution for the package's source
- self.dist = Distribution(
- target,
- PathMetadata(target, os.path.abspath(ei.egg_info)),
- project_name=ei.egg_name
- )
-
- p = self.egg_base.replace(os.sep, '/')
- if p != os.curdir:
- p = '../' * (p.count('/') + 1)
- self.setup_path = p
- p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
- if p != normalize_path(os.curdir):
- raise DistutilsOptionError(
- "Can't get a consistent path to setup script from"
- " installation directory", p, normalize_path(os.curdir))
-
- def install_for_development(self):
- if six.PY3 and getattr(self.distribution, 'use_2to3', False):
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
-
- # Fixup egg-link and easy-install.pth
- ei_cmd = self.get_finalized_command("egg_info")
- self.egg_path = build_path
- self.dist.location = build_path
- # XXX
- self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- self.install_site_py() # ensure that target dir is site-safe
- if setuptools.bootstrap_install_from:
- self.easy_install(setuptools.bootstrap_install_from)
- setuptools.bootstrap_install_from = None
-
- # create an .egg-link in the installation dir, pointing to our egg
- log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
- if not self.dry_run:
- with open(self.egg_link, "w") as f:
- f.write(self.egg_path + "\n" + self.setup_path)
- # postprocess the installed distro, fixing up .pth, installing scripts,
- # and handling requirements
- self.process_distribution(None, self.dist, not self.no_deps)
-
- def uninstall_link(self):
- if os.path.exists(self.egg_link):
- log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
- egg_link_file = open(self.egg_link)
- contents = [line.rstrip() for line in egg_link_file]
- egg_link_file.close()
- if contents not in ([self.egg_path],
- [self.egg_path, self.setup_path]):
- log.warn("Link points to %s: uninstall aborted", contents)
- return
- if not self.dry_run:
- os.unlink(self.egg_link)
- if not self.dry_run:
- self.update_pth(self.dist) # remove any .pth link to us
- if self.distribution.scripts:
- # XXX should also check for entry point scripts!
- log.warn("Note: you must uninstall or replace scripts manually!")
-
- def install_egg_scripts(self, dist):
- if dist is not self.dist:
- # Installing a dependency, so fall back to normal behavior
- return easy_install.install_egg_scripts(self, dist)
-
- # create wrapper scripts in the script dir, pointing to dist.scripts
-
- # new-style...
- self.install_wrapper_scripts(dist)
-
- # ...and old-style
- for script_name in self.distribution.scripts or []:
- script_path = os.path.abspath(convert_path(script_name))
- script_name = os.path.basename(script_path)
- with io.open(script_path) as strm:
- script_text = strm.read()
- self.install_script(dist, script_name, script_text, script_path)
-
- def install_wrapper_scripts(self, dist):
- dist = VersionlessRequirement(dist)
- return easy_install.install_wrapper_scripts(self, dist)
-
-
-class VersionlessRequirement(object):
- """
- Adapt a pkg_resources.Distribution to simply return the project
- name as the 'requirement' so that scripts will work across
- multiple versions.
-
- >>> dist = Distribution(project_name='foo', version='1.0')
- >>> str(dist.as_requirement())
- 'foo==1.0'
- >>> adapted_dist = VersionlessRequirement(dist)
- >>> str(adapted_dist.as_requirement())
- 'foo'
- """
- def __init__(self, dist):
- self.__dist = dist
-
- def __getattr__(self, name):
- return getattr(self.__dist, name)
-
- def as_requirement(self):
- return self.project_name
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
deleted file mode 100755
index ea5cb028..00000000
--- a/setuptools/command/easy_install.py
+++ /dev/null
@@ -1,2263 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages. For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ https://pythonhosted.org/setuptools/easy_install.html
-
-"""
-
-from glob import glob
-from distutils.util import get_platform
-from distutils.util import convert_path, subst_vars
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
- DistutilsError, DistutilsPlatformError
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
-from distutils import log, dir_util
-from distutils.command.build_scripts import first_line_re
-from distutils.spawn import find_executable
-import sys
-import os
-import zipimport
-import shutil
-import tempfile
-import zipfile
-import re
-import stat
-import random
-import platform
-import textwrap
-import warnings
-import site
-import struct
-import contextlib
-import subprocess
-import shlex
-import io
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import configparser, map
-
-from setuptools import Command
-from setuptools.sandbox import run_setup
-from setuptools.py31compat import get_path, get_config_vars
-from setuptools.command import setopt
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import PackageIndex
-from setuptools.package_index import URL_SCHEME
-from setuptools.command import bdist_egg, egg_info
-from pkg_resources import (
- yield_lines, normalize_path, resource_string, ensure_directory,
- get_distribution, find_distributions, Environment, Requirement,
- Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
- VersionConflict, DEVELOP_DIST,
-)
-import pkg_resources
-
-# Turn on PEP440Warnings
-warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
-
-
-__all__ = [
- 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
- 'main', 'get_exe_prefixes',
-]
-
-
-def is_64bit():
- return struct.calcsize("P") == 8
-
-
-def samefile(p1, p2):
- both_exist = os.path.exists(p1) and os.path.exists(p2)
- use_samefile = hasattr(os.path, 'samefile') and both_exist
- if use_samefile:
- return os.path.samefile(p1, p2)
- norm_p1 = os.path.normpath(os.path.normcase(p1))
- norm_p2 = os.path.normpath(os.path.normcase(p2))
- return norm_p1 == norm_p2
-
-
-if six.PY2:
- def _to_ascii(s):
- return s
-
- def isascii(s):
- try:
- six.text_type(s, 'ascii')
- return True
- except UnicodeError:
- return False
-else:
- def _to_ascii(s):
- return s.encode('ascii')
-
- def isascii(s):
- try:
- s.encode('ascii')
- return True
- except UnicodeError:
- return False
-
-
-class easy_install(Command):
- """Manage a download/build/install process"""
- description = "Find/get/install Python packages"
- command_consumes_arguments = True
-
- user_options = [
- ('prefix=', None, "installation prefix"),
- ("zip-ok", "z", "install package as a zipfile"),
- ("multi-version", "m", "make apps have to require() a version"),
- ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
- ("install-dir=", "d", "install package to DIR"),
- ("script-dir=", "s", "install scripts to DIR"),
- ("exclude-scripts", "x", "Don't install scripts"),
- ("always-copy", "a", "Copy all needed packages to install dir"),
- ("index-url=", "i", "base URL of Python Package Index"),
- ("find-links=", "f", "additional URL(s) to search for packages"),
- ("build-directory=", "b",
- "download/extract/build in DIR; keep the results"),
- ('optimize=', 'O',
- "also compile with optimization: -O1 for \"python -O\", "
- "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
- ('record=', None,
- "filename in which to record list of installed files"),
- ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
- ('site-dirs=', 'S', "list of directories where .pth files work"),
- ('editable', 'e', "Install specified packages in editable form"),
- ('no-deps', 'N', "don't install dependencies"),
- ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
- ('local-snapshots-ok', 'l',
- "allow building eggs from local checkouts"),
- ('version', None, "print version information and exit"),
- ('no-find-links', None,
- "Don't load find-links defined in packages being installed")
- ]
- boolean_options = [
- 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
- 'editable',
- 'no-deps', 'local-snapshots-ok', 'version'
- ]
-
- if site.ENABLE_USER_SITE:
- help_msg = "install in user site-package '%s'" % site.USER_SITE
- user_options.append(('user', None, help_msg))
- boolean_options.append('user')
-
- negative_opt = {'always-unzip': 'zip-ok'}
- create_index = PackageIndex
-
- def initialize_options(self):
- # the --user option seems to be an opt-in one,
- # so the default should be False.
- self.user = 0
- self.zip_ok = self.local_snapshots_ok = None
- self.install_dir = self.script_dir = self.exclude_scripts = None
- self.index_url = None
- self.find_links = None
- self.build_directory = None
- self.args = None
- self.optimize = self.record = None
- self.upgrade = self.always_copy = self.multi_version = None
- self.editable = self.no_deps = self.allow_hosts = None
- self.root = self.prefix = self.no_report = None
- self.version = None
- self.install_purelib = None # for pure module distributions
- self.install_platlib = None # non-pure (dists w/ extensions)
- self.install_headers = None # for C/C++ headers
- self.install_lib = None # set to either purelib or platlib
- self.install_scripts = None
- self.install_data = None
- self.install_base = None
- self.install_platbase = None
- if site.ENABLE_USER_SITE:
- self.install_userbase = site.USER_BASE
- self.install_usersite = site.USER_SITE
- else:
- self.install_userbase = None
- self.install_usersite = None
- self.no_find_links = None
-
- # Options not specifiable via command line
- self.package_index = None
- self.pth_file = self.always_copy_from = None
- self.site_dirs = None
- self.installed_projects = {}
- self.sitepy_installed = False
- # Always read easy_install options, even if we are subclassed, or have
- # an independent instance created. This ensures that defaults will
- # always come from the standard configuration file(s)' "easy_install"
- # section, even if this is a "develop" or "install" command, or some
- # other embedding.
- self._dry_run = None
- self.verbose = self.distribution.verbose
- self.distribution._set_command_options(
- self, self.distribution.get_option_dict('easy_install')
- )
-
- def delete_blockers(self, blockers):
- extant_blockers = (
- filename for filename in blockers
- if os.path.exists(filename) or os.path.islink(filename)
- )
- list(map(self._delete_path, extant_blockers))
-
- def _delete_path(self, path):
- log.info("Deleting %s", path)
- if self.dry_run:
- return
-
- is_tree = os.path.isdir(path) and not os.path.islink(path)
- remover = rmtree if is_tree else os.unlink
- remover(path)
-
- @staticmethod
- def _render_version():
- """
- Render the Setuptools version and installation details, then exit.
- """
- ver = sys.version[:3]
- dist = get_distribution('setuptools')
- tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
- print(tmpl.format(**locals()))
- raise SystemExit()
-
- def finalize_options(self):
- self.version and self._render_version()
-
- py_version = sys.version.split()[0]
- prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
-
- self.config_vars = {
- 'dist_name': self.distribution.get_name(),
- 'dist_version': self.distribution.get_version(),
- 'dist_fullname': self.distribution.get_fullname(),
- 'py_version': py_version,
- 'py_version_short': py_version[0:3],
- 'py_version_nodot': py_version[0] + py_version[2],
- 'sys_prefix': prefix,
- 'prefix': prefix,
- 'sys_exec_prefix': exec_prefix,
- 'exec_prefix': exec_prefix,
- # Only python 3.2+ has abiflags
- 'abiflags': getattr(sys, 'abiflags', ''),
- }
-
- if site.ENABLE_USER_SITE:
- self.config_vars['userbase'] = self.install_userbase
- self.config_vars['usersite'] = self.install_usersite
-
- self._fix_install_dir_for_user_site()
-
- self.expand_basedirs()
- self.expand_dirs()
-
- self._expand('install_dir', 'script_dir', 'build_directory',
- 'site_dirs')
- # If a non-default installation directory was specified, default the
- # script directory to match it.
- if self.script_dir is None:
- self.script_dir = self.install_dir
-
- if self.no_find_links is None:
- self.no_find_links = False
-
- # Let install_dir get set by install_lib command, which in turn
- # gets its info from the install command, and takes into account
- # --prefix and --home and all that other crud.
- self.set_undefined_options(
- 'install_lib', ('install_dir', 'install_dir')
- )
- # Likewise, set default script_dir from 'install_scripts.install_dir'
- self.set_undefined_options(
- 'install_scripts', ('install_dir', 'script_dir')
- )
-
- if self.user and self.install_purelib:
- self.install_dir = self.install_purelib
- self.script_dir = self.install_scripts
- # default --record from the install command
- self.set_undefined_options('install', ('record', 'record'))
- # Should this be moved to the if statement below? It's not used
- # elsewhere
- normpath = map(normalize_path, sys.path)
- self.all_site_dirs = get_site_dirs()
- if self.site_dirs is not None:
- site_dirs = [
- os.path.expanduser(s.strip()) for s in
- self.site_dirs.split(',')
- ]
- for d in site_dirs:
- if not os.path.isdir(d):
- log.warn("%s (in --site-dirs) does not exist", d)
- elif normalize_path(d) not in normpath:
- raise DistutilsOptionError(
- d + " (in --site-dirs) is not on sys.path"
- )
- else:
- self.all_site_dirs.append(normalize_path(d))
- if not self.editable:
- self.check_site_dir()
- self.index_url = self.index_url or "https://pypi.python.org/simple"
- self.shadow_path = self.all_site_dirs[:]
- for path_item in self.install_dir, normalize_path(self.script_dir):
- if path_item not in self.shadow_path:
- self.shadow_path.insert(0, path_item)
-
- if self.allow_hosts is not None:
- hosts = [s.strip() for s in self.allow_hosts.split(',')]
- else:
- hosts = ['*']
- if self.package_index is None:
- self.package_index = self.create_index(
- self.index_url, search_path=self.shadow_path, hosts=hosts,
- )
- self.local_index = Environment(self.shadow_path + sys.path)
-
- if self.find_links is not None:
- if isinstance(self.find_links, six.string_types):
- self.find_links = self.find_links.split()
- else:
- self.find_links = []
- if self.local_snapshots_ok:
- self.package_index.scan_egg_links(self.shadow_path + sys.path)
- if not self.no_find_links:
- self.package_index.add_find_links(self.find_links)
- self.set_undefined_options('install_lib', ('optimize', 'optimize'))
- if not isinstance(self.optimize, int):
- try:
- self.optimize = int(self.optimize)
- if not (0 <= self.optimize <= 2):
- raise ValueError
- except ValueError:
- raise DistutilsOptionError("--optimize must be 0, 1, or 2")
-
- if self.editable and not self.build_directory:
- raise DistutilsArgError(
- "Must specify a build directory (-b) when using --editable"
- )
- if not self.args:
- raise DistutilsArgError(
- "No urls, filenames, or requirements specified (see --help)")
-
- self.outputs = []
-
- def _fix_install_dir_for_user_site(self):
- """
- Fix the install_dir if "--user" was used.
- """
- if not self.user or not site.ENABLE_USER_SITE:
- return
-
- self.create_home_path()
- if self.install_userbase is None:
- msg = "User base directory is not specified"
- raise DistutilsPlatformError(msg)
- self.install_base = self.install_platbase = self.install_userbase
- scheme_name = os.name.replace('posix', 'unix') + '_user'
- self.select_scheme(scheme_name)
-
- def _expand_attrs(self, attrs):
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- if os.name == 'posix' or os.name == 'nt':
- val = os.path.expanduser(val)
- val = subst_vars(val, self.config_vars)
- setattr(self, attr, val)
-
- def expand_basedirs(self):
- """Calls `os.path.expanduser` on install_base, install_platbase and
- root."""
- self._expand_attrs(['install_base', 'install_platbase', 'root'])
-
- def expand_dirs(self):
- """Calls `os.path.expanduser` on install dirs."""
- self._expand_attrs(['install_purelib', 'install_platlib',
- 'install_lib', 'install_headers',
- 'install_scripts', 'install_data', ])
-
- def run(self):
- if self.verbose != self.distribution.verbose:
- log.set_verbosity(self.verbose)
- try:
- for spec in self.args:
- self.easy_install(spec, not self.no_deps)
- if self.record:
- outputs = self.outputs
- if self.root: # strip any package prefix
- root_len = len(self.root)
- for counter in range(len(outputs)):
- outputs[counter] = outputs[counter][root_len:]
- from distutils import file_util
-
- self.execute(
- file_util.write_file, (self.record, outputs),
- "writing list of installed files to '%s'" %
- self.record
- )
- self.warn_deprecated_options()
- finally:
- log.set_verbosity(self.distribution.verbose)
-
- def pseudo_tempname(self):
- """Return a pseudo-tempname base in the install directory.
- This code is intentionally naive; if a malicious party can write to
- the target directory you're already in deep doodoo.
- """
- try:
- pid = os.getpid()
- except:
- pid = random.randint(0, sys.maxsize)
- return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
-
- def warn_deprecated_options(self):
- pass
-
- def check_site_dir(self):
- """Verify that self.install_dir is .pth-capable dir, if needed"""
-
- instdir = normalize_path(self.install_dir)
- pth_file = os.path.join(instdir, 'easy-install.pth')
-
- # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
- is_site_dir = instdir in self.all_site_dirs
-
- if not is_site_dir and not self.multi_version:
- # No? Then directly test whether it does .pth file processing
- is_site_dir = self.check_pth_processing()
- else:
- # make sure we can write to target dir
- testfile = self.pseudo_tempname() + '.write-test'
- test_exists = os.path.exists(testfile)
- try:
- if test_exists:
- os.unlink(testfile)
- open(testfile, 'w').close()
- os.unlink(testfile)
- except (OSError, IOError):
- self.cant_write_to_target()
-
- if not is_site_dir and not self.multi_version:
- # Can't install non-multi to non-site dir
- raise DistutilsError(self.no_default_version_msg())
-
- if is_site_dir:
- if self.pth_file is None:
- self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
- else:
- self.pth_file = None
-
- PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
- if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
- # only PYTHONPATH dirs need a site.py, so pretend it's there
- self.sitepy_installed = True
- elif self.multi_version and not os.path.exists(pth_file):
- self.sitepy_installed = True # don't need site.py in this case
- self.pth_file = None # and don't create a .pth file
- self.install_dir = instdir
-
- __cant_write_msg = textwrap.dedent("""
- can't create or remove files in install directory
-
- The following error occurred while trying to add or remove files in the
- installation directory:
-
- %s
-
- The installation directory you specified (via --install-dir, --prefix, or
- the distutils default setting) was:
-
- %s
- """).lstrip()
-
- __not_exists_id = textwrap.dedent("""
- This directory does not currently exist. Please create it and try again, or
- choose a different installation directory (using the -d or --install-dir
- option).
- """).lstrip()
-
- __access_msg = textwrap.dedent("""
- Perhaps your account does not have write access to this directory? If the
- installation directory is a system-owned directory, you may need to sign in
- as the administrator or "root" account. If you do not have administrative
- access to this machine, you may wish to choose a different installation
- directory, preferably one that is listed in your PYTHONPATH environment
- variable.
-
- For information on other options, you may wish to consult the
- documentation at:
-
- https://pythonhosted.org/setuptools/easy_install.html
-
- Please make the appropriate changes for your system and try again.
- """).lstrip()
-
- def cant_write_to_target(self):
- msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
-
- if not os.path.exists(self.install_dir):
- msg += '\n' + self.__not_exists_id
- else:
- msg += '\n' + self.__access_msg
- raise DistutilsError(msg)
-
- def check_pth_processing(self):
- """Empirically verify whether .pth files are supported in inst. dir"""
- instdir = self.install_dir
- log.info("Checking .pth file support in %s", instdir)
- pth_file = self.pseudo_tempname() + ".pth"
- ok_file = pth_file + '.ok'
- ok_exists = os.path.exists(ok_file)
- try:
- if ok_exists:
- os.unlink(ok_file)
- dirname = os.path.dirname(ok_file)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- f = open(pth_file, 'w')
- except (OSError, IOError):
- self.cant_write_to_target()
- else:
- try:
- f.write("import os; f = open(%r, 'w'); f.write('OK'); "
- "f.close()\n" % (ok_file,))
- f.close()
- f = None
- executable = sys.executable
- if os.name == 'nt':
- dirname, basename = os.path.split(executable)
- alt = os.path.join(dirname, 'pythonw.exe')
- if (basename.lower() == 'python.exe' and
- os.path.exists(alt)):
- # use pythonw.exe to avoid opening a console window
- executable = alt
-
- from distutils.spawn import spawn
-
- spawn([executable, '-E', '-c', 'pass'], 0)
-
- if os.path.exists(ok_file):
- log.info(
- "TEST PASSED: %s appears to support .pth files",
- instdir
- )
- return True
- finally:
- if f:
- f.close()
- if os.path.exists(ok_file):
- os.unlink(ok_file)
- if os.path.exists(pth_file):
- os.unlink(pth_file)
- if not self.multi_version:
- log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
- return False
-
- def install_egg_scripts(self, dist):
- """Write all the scripts for `dist`, unless scripts are excluded"""
- if not self.exclude_scripts and dist.metadata_isdir('scripts'):
- for script_name in dist.metadata_listdir('scripts'):
- if dist.metadata_isdir('scripts/' + script_name):
- # The "script" is a directory, likely a Python 3
- # __pycache__ directory, so skip it.
- continue
- self.install_script(
- dist, script_name,
- dist.get_metadata('scripts/' + script_name)
- )
- self.install_wrapper_scripts(dist)
-
- def add_output(self, path):
- if os.path.isdir(path):
- for base, dirs, files in os.walk(path):
- for filename in files:
- self.outputs.append(os.path.join(base, filename))
- else:
- self.outputs.append(path)
-
- def not_editable(self, spec):
- if self.editable:
- raise DistutilsArgError(
- "Invalid argument %r: you can't use filenames or URLs "
- "with --editable (except via the --find-links option)."
- % (spec,)
- )
-
- def check_editable(self, spec):
- if not self.editable:
- return
-
- if os.path.exists(os.path.join(self.build_directory, spec.key)):
- raise DistutilsArgError(
- "%r already exists in %s; can't do a checkout there" %
- (spec.key, self.build_directory)
- )
-
- def easy_install(self, spec, deps=False):
- tmpdir = tempfile.mkdtemp(prefix="easy_install-")
- download = None
- if not self.editable:
- self.install_site_py()
-
- try:
- if not isinstance(spec, Requirement):
- if URL_SCHEME(spec):
- # It's a url, download it to tmpdir and process
- self.not_editable(spec)
- download = self.package_index.download(spec, tmpdir)
- return self.install_item(None, download, tmpdir, deps,
- True)
-
- elif os.path.exists(spec):
- # Existing file or directory, just process it directly
- self.not_editable(spec)
- return self.install_item(None, spec, tmpdir, deps, True)
- else:
- spec = parse_requirement_arg(spec)
-
- self.check_editable(spec)
- dist = self.package_index.fetch_distribution(
- spec, tmpdir, self.upgrade, self.editable,
- not self.always_copy, self.local_index
- )
- if dist is None:
- msg = "Could not find suitable distribution for %r" % spec
- if self.always_copy:
- msg += " (--always-copy skips system and development eggs)"
- raise DistutilsError(msg)
- elif dist.precedence == DEVELOP_DIST:
- # .egg-info dists don't need installing, just process deps
- self.process_distribution(spec, dist, deps, "Using")
- return dist
- else:
- return self.install_item(spec, dist.location, tmpdir, deps)
-
- finally:
- if os.path.exists(tmpdir):
- rmtree(tmpdir)
-
- def install_item(self, spec, download, tmpdir, deps, install_needed=False):
-
- # Installation is also needed if file in tmpdir or is not an egg
- install_needed = install_needed or self.always_copy
- install_needed = install_needed or os.path.dirname(download) == tmpdir
- install_needed = install_needed or not download.endswith('.egg')
- install_needed = install_needed or (
- self.always_copy_from is not None and
- os.path.dirname(normalize_path(download)) ==
- normalize_path(self.always_copy_from)
- )
-
- if spec and not install_needed:
- # at this point, we know it's a local .egg, we just don't know if
- # it's already installed.
- for dist in self.local_index[spec.project_name]:
- if dist.location == download:
- break
- else:
- install_needed = True # it's not in the local index
-
- log.info("Processing %s", os.path.basename(download))
-
- if install_needed:
- dists = self.install_eggs(spec, download, tmpdir)
- for dist in dists:
- self.process_distribution(spec, dist, deps)
- else:
- dists = [self.egg_distribution(download)]
- self.process_distribution(spec, dists[0], deps, "Using")
-
- if spec is not None:
- for dist in dists:
- if dist in spec:
- return dist
-
- def select_scheme(self, name):
- """Sets the install directories by applying the install schemes."""
- # it's the caller's problem if they supply a bad name!
- scheme = INSTALL_SCHEMES[name]
- for key in SCHEME_KEYS:
- attrname = 'install_' + key
- if getattr(self, attrname) is None:
- setattr(self, attrname, scheme[key])
-
- def process_distribution(self, requirement, dist, deps=True, *info):
- self.update_pth(dist)
- self.package_index.add(dist)
- if dist in self.local_index[dist.key]:
- self.local_index.remove(dist)
- self.local_index.add(dist)
- self.install_egg_scripts(dist)
- self.installed_projects[dist.key] = dist
- log.info(self.installation_report(requirement, dist, *info))
- if (dist.has_metadata('dependency_links.txt') and
- not self.no_find_links):
- self.package_index.add_find_links(
- dist.get_metadata_lines('dependency_links.txt')
- )
- if not deps and not self.always_copy:
- return
- elif requirement is not None and dist.key != requirement.key:
- log.warn("Skipping dependencies for %s", dist)
- return # XXX this is not the distribution we were looking for
- elif requirement is None or dist not in requirement:
- # if we wound up with a different version, resolve what we've got
- distreq = dist.as_requirement()
- requirement = Requirement(str(distreq))
- log.info("Processing dependencies for %s", requirement)
- try:
- distros = WorkingSet([]).resolve(
- [requirement], self.local_index, self.easy_install
- )
- except DistributionNotFound as e:
- raise DistutilsError(str(e))
- except VersionConflict as e:
- raise DistutilsError(e.report())
- if self.always_copy or self.always_copy_from:
- # Force all the relevant distros to be copied or activated
- for dist in distros:
- if dist.key not in self.installed_projects:
- self.easy_install(dist.as_requirement())
- log.info("Finished processing dependencies for %s", requirement)
-
- def should_unzip(self, dist):
- if self.zip_ok is not None:
- return not self.zip_ok
- if dist.has_metadata('not-zip-safe'):
- return True
- if not dist.has_metadata('zip-safe'):
- return True
- return False
-
- def maybe_move(self, spec, dist_filename, setup_base):
- dst = os.path.join(self.build_directory, spec.key)
- if os.path.exists(dst):
- msg = ("%r already exists in %s; build directory %s will not be "
- "kept")
- log.warn(msg, spec.key, self.build_directory, setup_base)
- return setup_base
- if os.path.isdir(dist_filename):
- setup_base = dist_filename
- else:
- if os.path.dirname(dist_filename) == setup_base:
- os.unlink(dist_filename) # get it out of the tmp dir
- contents = os.listdir(setup_base)
- if len(contents) == 1:
- dist_filename = os.path.join(setup_base, contents[0])
- if os.path.isdir(dist_filename):
- # if the only thing there is a directory, move it instead
- setup_base = dist_filename
- ensure_directory(dst)
- shutil.move(setup_base, dst)
- return dst
-
- def install_wrapper_scripts(self, dist):
- if self.exclude_scripts:
- return
- for args in ScriptWriter.best().get_args(dist):
- self.write_script(*args)
-
- def install_script(self, dist, script_name, script_text, dev_path=None):
- """Generate a legacy script wrapper and install it"""
- spec = str(dist.as_requirement())
- is_script = is_python_script(script_text, script_name)
-
- if is_script:
- body = self._load_template(dev_path) % locals()
- script_text = ScriptWriter.get_header(script_text) + body
- self.write_script(script_name, _to_ascii(script_text), 'b')
-
- @staticmethod
- def _load_template(dev_path):
- """
- There are a couple of template scripts in the package. This
- function loads one of them and prepares it for use.
- """
- # See https://github.com/pypa/setuptools/issues/134 for info
- # on script file naming and downstream issues with SVR4
- name = 'script.tmpl'
- if dev_path:
- name = name.replace('.tmpl', ' (dev).tmpl')
-
- raw_bytes = resource_string('setuptools', name)
- return raw_bytes.decode('utf-8')
-
- def write_script(self, script_name, contents, mode="t", blockers=()):
- """Write an executable file to the scripts directory"""
- self.delete_blockers( # clean up old .py/.pyw w/o a script
- [os.path.join(self.script_dir, x) for x in blockers]
- )
- log.info("Installing %s script to %s", script_name, self.script_dir)
- target = os.path.join(self.script_dir, script_name)
- self.add_output(target)
-
- mask = current_umask()
- if not self.dry_run:
- ensure_directory(target)
- if os.path.exists(target):
- os.unlink(target)
- with open(target, "w" + mode) as f:
- f.write(contents)
- chmod(target, 0o777 - mask)
-
- def install_eggs(self, spec, dist_filename, tmpdir):
- # .egg dirs or files are already built, so just return them
- if dist_filename.lower().endswith('.egg'):
- return [self.install_egg(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.exe'):
- return [self.install_exe(dist_filename, tmpdir)]
-
- # Anything else, try to extract and build
- setup_base = tmpdir
- if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
- unpack_archive(dist_filename, tmpdir, self.unpack_progress)
- elif os.path.isdir(dist_filename):
- setup_base = os.path.abspath(dist_filename)
-
- if (setup_base.startswith(tmpdir) # something we downloaded
- and self.build_directory and spec is not None):
- setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
- # Find the setup.py file
- setup_script = os.path.join(setup_base, 'setup.py')
-
- if not os.path.exists(setup_script):
- setups = glob(os.path.join(setup_base, '*', 'setup.py'))
- if not setups:
- raise DistutilsError(
- "Couldn't find a setup script in %s" %
- os.path.abspath(dist_filename)
- )
- if len(setups) > 1:
- raise DistutilsError(
- "Multiple setup scripts in %s" %
- os.path.abspath(dist_filename)
- )
- setup_script = setups[0]
-
- # Now run it, and return the result
- if self.editable:
- log.info(self.report_editable(spec, setup_script))
- return []
- else:
- return self.build_and_install(setup_script, setup_base)
-
- def egg_distribution(self, egg_path):
- if os.path.isdir(egg_path):
- metadata = PathMetadata(egg_path, os.path.join(egg_path,
- 'EGG-INFO'))
- else:
- metadata = EggMetadata(zipimport.zipimporter(egg_path))
- return Distribution.from_filename(egg_path, metadata=metadata)
-
- def install_egg(self, egg_path, tmpdir):
- destination = os.path.join(self.install_dir,
- os.path.basename(egg_path))
- destination = os.path.abspath(destination)
- if not self.dry_run:
- ensure_directory(destination)
-
- dist = self.egg_distribution(egg_path)
- if not samefile(egg_path, destination):
- if os.path.isdir(destination) and not os.path.islink(destination):
- dir_util.remove_tree(destination, dry_run=self.dry_run)
- elif os.path.exists(destination):
- self.execute(os.unlink, (destination,), "Removing " +
- destination)
- try:
- new_dist_is_zipped = False
- if os.path.isdir(egg_path):
- if egg_path.startswith(tmpdir):
- f, m = shutil.move, "Moving"
- else:
- f, m = shutil.copytree, "Copying"
- elif self.should_unzip(dist):
- self.mkpath(destination)
- f, m = self.unpack_and_compile, "Extracting"
- else:
- new_dist_is_zipped = True
- if egg_path.startswith(tmpdir):
- f, m = shutil.move, "Moving"
- else:
- f, m = shutil.copy2, "Copying"
- self.execute(f, (egg_path, destination),
- (m + " %s to %s") %
- (os.path.basename(egg_path),
- os.path.dirname(destination)))
- update_dist_caches(destination,
- fix_zipimporter_caches=new_dist_is_zipped)
- except:
- update_dist_caches(destination, fix_zipimporter_caches=False)
- raise
-
- self.add_output(destination)
- return self.egg_distribution(destination)
-
- def install_exe(self, dist_filename, tmpdir):
- # See if it's valid, get data
- cfg = extract_wininst_cfg(dist_filename)
- if cfg is None:
- raise DistutilsError(
- "%s is not a valid distutils Windows .exe" % dist_filename
- )
- # Create a dummy distribution object until we build the real distro
- dist = Distribution(
- None,
- project_name=cfg.get('metadata', 'name'),
- version=cfg.get('metadata', 'version'), platform=get_platform(),
- )
-
- # Convert the .exe to an unpacked egg
- egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
- '.egg')
- egg_tmp = egg_path + '.tmp'
- _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
- pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
- ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
- dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
- self.exe_to_egg(dist_filename, egg_tmp)
-
- # Write EGG-INFO/PKG-INFO
- if not os.path.exists(pkg_inf):
- f = open(pkg_inf, 'w')
- f.write('Metadata-Version: 1.0\n')
- for k, v in cfg.items('metadata'):
- if k != 'target_version':
- f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
- f.close()
- script_dir = os.path.join(_egg_info, 'scripts')
- # delete entry-point scripts to avoid duping
- self.delete_blockers(
- [os.path.join(script_dir, args[0]) for args in
- ScriptWriter.get_args(dist)]
- )
- # Build .egg file from tmpdir
- bdist_egg.make_zipfile(
- egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
- )
- # install the .egg
- return self.install_egg(egg_path, tmpdir)
-
- def exe_to_egg(self, dist_filename, egg_tmp):
- """Extract a bdist_wininst to the directories an egg would use"""
- # Check for .pth file and set up prefix translations
- prefixes = get_exe_prefixes(dist_filename)
- to_compile = []
- native_libs = []
- top_level = {}
-
- def process(src, dst):
- s = src.lower()
- for old, new in prefixes:
- if s.startswith(old):
- src = new + src[len(old):]
- parts = src.split('/')
- dst = os.path.join(egg_tmp, *parts)
- dl = dst.lower()
- if dl.endswith('.pyd') or dl.endswith('.dll'):
- parts[-1] = bdist_egg.strip_module(parts[-1])
- top_level[os.path.splitext(parts[0])[0]] = 1
- native_libs.append(src)
- elif dl.endswith('.py') and old != 'SCRIPTS/':
- top_level[os.path.splitext(parts[0])[0]] = 1
- to_compile.append(dst)
- return dst
- if not src.endswith('.pth'):
- log.warn("WARNING: can't process %s", src)
- return None
-
- # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
- unpack_archive(dist_filename, egg_tmp, process)
- stubs = []
- for res in native_libs:
- if res.lower().endswith('.pyd'): # create stubs for .pyd's
- parts = res.split('/')
- resource = parts[-1]
- parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
- pyfile = os.path.join(egg_tmp, *parts)
- to_compile.append(pyfile)
- stubs.append(pyfile)
- bdist_egg.write_stub(resource, pyfile)
- self.byte_compile(to_compile) # compile .py's
- bdist_egg.write_safety_flag(
- os.path.join(egg_tmp, 'EGG-INFO'),
- bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
-
- for name in 'top_level', 'native_libs':
- if locals()[name]:
- txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
- if not os.path.exists(txt):
- f = open(txt, 'w')
- f.write('\n'.join(locals()[name]) + '\n')
- f.close()
-
- __mv_warning = textwrap.dedent("""
- Because this distribution was installed --multi-version, before you can
- import modules from this package in an application, you will need to
- 'import pkg_resources' and then use a 'require()' call similar to one of
- these examples, in order to select the desired version:
-
- pkg_resources.require("%(name)s") # latest installed version
- pkg_resources.require("%(name)s==%(version)s") # this exact version
- pkg_resources.require("%(name)s>=%(version)s") # this version or higher
- """).lstrip()
-
- __id_warning = textwrap.dedent("""
- Note also that the installation directory must be on sys.path at runtime for
- this to work. (e.g. by being the application's script directory, by being on
- PYTHONPATH, or by being added to sys.path by your code.)
- """)
-
- def installation_report(self, req, dist, what="Installed"):
- """Helpful installation message for display to package users"""
- msg = "\n%(what)s %(eggloc)s%(extras)s"
- if self.multi_version and not self.no_report:
- msg += '\n' + self.__mv_warning
- if self.install_dir not in map(normalize_path, sys.path):
- msg += '\n' + self.__id_warning
-
- eggloc = dist.location
- name = dist.project_name
- version = dist.version
- extras = '' # TODO: self.report_extras(req, dist)
- return msg % locals()
-
- __editable_msg = textwrap.dedent("""
- Extracted editable version of %(spec)s to %(dirname)s
-
- If it uses setuptools in its setup script, you can activate it in
- "development" mode by going to that directory and running::
-
- %(python)s setup.py develop
-
- See the setuptools documentation for the "develop" command for more info.
- """).lstrip()
-
- def report_editable(self, spec, setup_script):
- dirname = os.path.dirname(setup_script)
- python = sys.executable
- return '\n' + self.__editable_msg % locals()
-
- def run_setup(self, setup_script, setup_base, args):
- sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
- sys.modules.setdefault('distutils.command.egg_info', egg_info)
-
- args = list(args)
- if self.verbose > 2:
- v = 'v' * (self.verbose - 1)
- args.insert(0, '-' + v)
- elif self.verbose < 2:
- args.insert(0, '-q')
- if self.dry_run:
- args.insert(0, '-n')
- log.info(
- "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
- )
- try:
- run_setup(setup_script, args)
- except SystemExit as v:
- raise DistutilsError("Setup script exited with %s" % (v.args[0],))
-
- def build_and_install(self, setup_script, setup_base):
- args = ['bdist_egg', '--dist-dir']
-
- dist_dir = tempfile.mkdtemp(
- prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
- )
- try:
- self._set_fetcher_options(os.path.dirname(setup_script))
- args.append(dist_dir)
-
- self.run_setup(setup_script, setup_base, args)
- all_eggs = Environment([dist_dir])
- eggs = []
- for key in all_eggs:
- for dist in all_eggs[key]:
- eggs.append(self.install_egg(dist.location, setup_base))
- if not eggs and not self.dry_run:
- log.warn("No eggs found in %s (setup script problem?)",
- dist_dir)
- return eggs
- finally:
- rmtree(dist_dir)
- log.set_verbosity(self.verbose) # restore our log verbosity
-
- def _set_fetcher_options(self, base):
- """
- When easy_install is about to run bdist_egg on a source dist, that
- source dist might have 'setup_requires' directives, requiring
- additional fetching. Ensure the fetcher options given to easy_install
- are available to that command as well.
- """
- # find the fetch options from easy_install and write them out
- # to the setup.cfg file.
- ei_opts = self.distribution.get_option_dict('easy_install').copy()
- fetch_directives = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
- 'site_dirs', 'allow_hosts',
- )
- fetch_options = {}
- for key, val in ei_opts.items():
- if key not in fetch_directives:
- continue
- fetch_options[key.replace('_', '-')] = val[1]
- # create a settings dictionary suitable for `edit_config`
- settings = dict(easy_install=fetch_options)
- cfg_filename = os.path.join(base, 'setup.cfg')
- setopt.edit_config(cfg_filename, settings)
-
- def update_pth(self, dist):
- if self.pth_file is None:
- return
-
- for d in self.pth_file[dist.key]: # drop old entries
- if self.multi_version or d.location != dist.location:
- log.info("Removing %s from easy-install.pth file", d)
- self.pth_file.remove(d)
- if d.location in self.shadow_path:
- self.shadow_path.remove(d.location)
-
- if not self.multi_version:
- if dist.location in self.pth_file.paths:
- log.info(
- "%s is already the active version in easy-install.pth",
- dist
- )
- else:
- log.info("Adding %s to easy-install.pth file", dist)
- self.pth_file.add(dist) # add new entry
- if dist.location not in self.shadow_path:
- self.shadow_path.append(dist.location)
-
- if not self.dry_run:
-
- self.pth_file.save()
-
- if dist.key == 'setuptools':
- # Ensure that setuptools itself never becomes unavailable!
- # XXX should this check for latest version?
- filename = os.path.join(self.install_dir, 'setuptools.pth')
- if os.path.islink(filename):
- os.unlink(filename)
- f = open(filename, 'wt')
- f.write(self.pth_file.make_relative(dist.location) + '\n')
- f.close()
-
- def unpack_progress(self, src, dst):
- # Progress filter for unpacking
- log.debug("Unpacking %s to %s", src, dst)
- return dst # only unpack-and-compile skips files for dry run
-
- def unpack_and_compile(self, egg_path, destination):
- to_compile = []
- to_chmod = []
-
- def pf(src, dst):
- if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
- to_compile.append(dst)
- elif dst.endswith('.dll') or dst.endswith('.so'):
- to_chmod.append(dst)
- self.unpack_progress(src, dst)
- return not self.dry_run and dst or None
-
- unpack_archive(egg_path, destination, pf)
- self.byte_compile(to_compile)
- if not self.dry_run:
- for f in to_chmod:
- mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
- chmod(f, mode)
-
- def byte_compile(self, to_compile):
- if sys.dont_write_bytecode:
- self.warn('byte-compiling is disabled, skipping.')
- return
-
- from distutils.util import byte_compile
-
- try:
- # try to make the byte compile messages quieter
- log.set_verbosity(self.verbose - 1)
-
- byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
- if self.optimize:
- byte_compile(
- to_compile, optimize=self.optimize, force=1,
- dry_run=self.dry_run
- )
- finally:
- log.set_verbosity(self.verbose) # restore original verbosity
-
- __no_default_msg = textwrap.dedent("""
- bad install directory or PYTHONPATH
-
- You are attempting to install a package to a directory that is not
- on PYTHONPATH and which Python does not read ".pth" files from. The
- installation directory you specified (via --install-dir, --prefix, or
- the distutils default setting) was:
-
- %s
-
- and your PYTHONPATH environment variable currently contains:
-
- %r
-
- Here are some of your options for correcting the problem:
-
- * You can choose a different installation directory, i.e., one that is
- on PYTHONPATH or supports .pth files
-
- * You can add the installation directory to the PYTHONPATH environment
- variable. (It must then also be on PYTHONPATH whenever you run
- Python and want to use the package(s) you are installing.)
-
- * You can set up the installation directory to support ".pth" files by
- using one of the approaches described here:
-
- https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
-
- Please make the appropriate changes for your system and try again.""").lstrip()
-
- def no_default_version_msg(self):
- template = self.__no_default_msg
- return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
-
- def install_site_py(self):
- """Make sure there's a site.py in the target dir, if needed"""
-
- if self.sitepy_installed:
- return # already did it, or don't need to
-
- sitepy = os.path.join(self.install_dir, "site.py")
- source = resource_string("setuptools", "site-patch.py")
- source = source.decode('utf-8')
- current = ""
-
- if os.path.exists(sitepy):
- log.debug("Checking existing site.py in %s", self.install_dir)
- with io.open(sitepy) as strm:
- current = strm.read()
-
- if not current.startswith('def __boot():'):
- raise DistutilsError(
- "%s is not a setuptools-generated site.py; please"
- " remove it." % sitepy
- )
-
- if current != source:
- log.info("Creating %s", sitepy)
- if not self.dry_run:
- ensure_directory(sitepy)
- with io.open(sitepy, 'w', encoding='utf-8') as strm:
- strm.write(source)
- self.byte_compile([sitepy])
-
- self.sitepy_installed = True
-
- def create_home_path(self):
- """Create directories under ~."""
- if not self.user:
- return
- home = convert_path(os.path.expanduser("~"))
- for name, path in six.iteritems(self.config_vars):
- if path.startswith(home) and not os.path.isdir(path):
- self.debug_print("os.makedirs('%s', 0o700)" % path)
- os.makedirs(path, 0o700)
-
- INSTALL_SCHEMES = dict(
- posix=dict(
- install_dir='$base/lib/python$py_version_short/site-packages',
- script_dir='$base/bin',
- ),
- )
-
- DEFAULT_SCHEME = dict(
- install_dir='$base/Lib/site-packages',
- script_dir='$base/Scripts',
- )
-
- def _expand(self, *attrs):
- config_vars = self.get_finalized_command('install').config_vars
-
- if self.prefix:
- # Set default install_dir/scripts from --prefix
- config_vars = config_vars.copy()
- config_vars['base'] = self.prefix
- scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
- for attr, val in scheme.items():
- if getattr(self, attr, None) is None:
- setattr(self, attr, val)
-
- from distutils.util import subst_vars
-
- for attr in attrs:
- val = getattr(self, attr)
- if val is not None:
- val = subst_vars(val, config_vars)
- if os.name == 'posix':
- val = os.path.expanduser(val)
- setattr(self, attr, val)
-
-
-def get_site_dirs():
- # return a list of 'site' dirs
- sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
- '').split(os.pathsep) if _f]
- prefixes = [sys.prefix]
- if sys.exec_prefix != sys.prefix:
- prefixes.append(sys.exec_prefix)
- for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos'):
- sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
- sitedirs.extend([os.path.join(prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages"),
- os.path.join(prefix, "lib", "site-python")])
- else:
- sitedirs.extend(
- [prefix, os.path.join(prefix, "lib", "site-packages")]
- )
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- sitedirs.append(
- os.path.join(home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages'))
- lib_paths = get_path('purelib'), get_path('platlib')
- for site_lib in lib_paths:
- if site_lib not in sitedirs:
- sitedirs.append(site_lib)
-
- if site.ENABLE_USER_SITE:
- sitedirs.append(site.USER_SITE)
-
- sitedirs = list(map(normalize_path, sitedirs))
-
- return sitedirs
-
-
-def expand_paths(inputs):
- """Yield sys.path directories that might contain "old-style" packages"""
-
- seen = {}
-
- for dirname in inputs:
- dirname = normalize_path(dirname)
- if dirname in seen:
- continue
-
- seen[dirname] = 1
- if not os.path.isdir(dirname):
- continue
-
- files = os.listdir(dirname)
- yield dirname, files
-
- for name in files:
- if not name.endswith('.pth'):
- # We only care about the .pth files
- continue
- if name in ('easy-install.pth', 'setuptools.pth'):
- # Ignore .pth files that we control
- continue
-
- # Read the .pth file
- f = open(os.path.join(dirname, name))
- lines = list(yield_lines(f))
- f.close()
-
- # Yield existing non-dupe, non-import directory lines from it
- for line in lines:
- if not line.startswith("import"):
- line = normalize_path(line.rstrip())
- if line not in seen:
- seen[line] = 1
- if not os.path.isdir(line):
- continue
- yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
- """Extract configuration data from a bdist_wininst .exe
-
- Returns a configparser.RawConfigParser, or None
- """
- f = open(dist_filename, 'rb')
- try:
- endrec = zipfile._EndRecData(f)
- if endrec is None:
- return None
-
- prepended = (endrec[9] - endrec[5]) - endrec[6]
- if prepended < 12: # no wininst data here
- return None
- f.seek(prepended - 12)
-
- tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
- if tag not in (0x1234567A, 0x1234567B):
- return None # not a valid tag
-
- f.seek(prepended - (12 + cfglen))
- cfg = configparser.RawConfigParser(
- {'version': '', 'target_version': ''})
- try:
- part = f.read(cfglen)
- # Read up to the first null byte.
- config = part.split(b'\0', 1)[0]
- # Now the config is in bytes, but for RawConfigParser, it should
- # be text, so decode it.
- config = config.decode(sys.getfilesystemencoding())
- cfg.readfp(six.StringIO(config))
- except configparser.Error:
- return None
- if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
- return None
- return cfg
-
- finally:
- f.close()
-
-
-def get_exe_prefixes(exe_filename):
- """Get exe->egg path translations for a given .exe file"""
-
- prefixes = [
- ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
- ('PLATLIB/', ''),
- ('SCRIPTS/', 'EGG-INFO/scripts/'),
- ('DATA/lib/site-packages', ''),
- ]
- z = zipfile.ZipFile(exe_filename)
- try:
- for info in z.infolist():
- name = info.filename
- parts = name.split('/')
- if len(parts) == 3 and parts[2] == 'PKG-INFO':
- if parts[1].endswith('.egg-info'):
- prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
- break
- if len(parts) != 2 or not name.endswith('.pth'):
- continue
- if name.endswith('-nspkg.pth'):
- continue
- if parts[0].upper() in ('PURELIB', 'PLATLIB'):
- contents = z.read(name)
- if six.PY3:
- contents = contents.decode()
- for pth in yield_lines(contents):
- pth = pth.strip().replace('\\', '/')
- if not pth.startswith('import'):
- prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
- finally:
- z.close()
- prefixes = [(x.lower(), y) for x, y in prefixes]
- prefixes.sort()
- prefixes.reverse()
- return prefixes
-
-
-def parse_requirement_arg(spec):
- try:
- return Requirement.parse(spec)
- except ValueError:
- raise DistutilsError(
- "Not a URL, existing file, or requirement spec: %r" % (spec,)
- )
-
-
-class PthDistributions(Environment):
- """A .pth file with Distribution paths in it"""
-
- dirty = False
-
- def __init__(self, filename, sitedirs=()):
- self.filename = filename
- self.sitedirs = list(map(normalize_path, sitedirs))
- self.basedir = normalize_path(os.path.dirname(self.filename))
- self._load()
- Environment.__init__(self, [], None, None)
- for path in yield_lines(self.paths):
- list(map(self.add, find_distributions(path, True)))
-
- def _load(self):
- self.paths = []
- saw_import = False
- seen = dict.fromkeys(self.sitedirs)
- if os.path.isfile(self.filename):
- f = open(self.filename, 'rt')
- for line in f:
- if line.startswith('import'):
- saw_import = True
- continue
- path = line.rstrip()
- self.paths.append(path)
- if not path.strip() or path.strip().startswith('#'):
- continue
- # skip non-existent paths, in case somebody deleted a package
- # manually, and duplicate paths as well
- path = self.paths[-1] = normalize_path(
- os.path.join(self.basedir, path)
- )
- if not os.path.exists(path) or path in seen:
- self.paths.pop() # skip it
- self.dirty = True # we cleaned up, so we're dirty now :)
- continue
- seen[path] = 1
- f.close()
-
- if self.paths and not saw_import:
- self.dirty = True # ensure anything we touch has import wrappers
- while self.paths and not self.paths[-1].strip():
- self.paths.pop()
-
- def save(self):
- """Write changed .pth file back to disk"""
- if not self.dirty:
- return
-
- rel_paths = list(map(self.make_relative, self.paths))
- if rel_paths:
- log.debug("Saving %s", self.filename)
- lines = self._wrap_lines(rel_paths)
- data = '\n'.join(lines) + '\n'
-
- if os.path.islink(self.filename):
- os.unlink(self.filename)
- with open(self.filename, 'wt') as f:
- f.write(data)
-
- elif os.path.exists(self.filename):
- log.debug("Deleting empty %s", self.filename)
- os.unlink(self.filename)
-
- self.dirty = False
-
- @staticmethod
- def _wrap_lines(lines):
- return lines
-
- def add(self, dist):
- """Add `dist` to the distribution map"""
- new_path = (
- dist.location not in self.paths and (
- dist.location not in self.sitedirs or
- # account for '.' being in PYTHONPATH
- dist.location == os.getcwd()
- )
- )
- if new_path:
- self.paths.append(dist.location)
- self.dirty = True
- Environment.add(self, dist)
-
- def remove(self, dist):
- """Remove `dist` from the distribution map"""
- while dist.location in self.paths:
- self.paths.remove(dist.location)
- self.dirty = True
- Environment.remove(self, dist)
-
- def make_relative(self, path):
- npath, last = os.path.split(normalize_path(path))
- baselen = len(self.basedir)
- parts = [last]
- sep = os.altsep == '/' and '/' or os.sep
- while len(npath) >= baselen:
- if npath == self.basedir:
- parts.append(os.curdir)
- parts.reverse()
- return sep.join(parts)
- npath, last = os.path.split(npath)
- parts.append(last)
- else:
- return path
-
-
-class RewritePthDistributions(PthDistributions):
-
- @classmethod
- def _wrap_lines(cls, lines):
- yield cls.prelude
- for line in lines:
- yield line
- yield cls.postlude
-
- _inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
- prelude = _inline("""
- import sys
- sys.__plen = len(sys.path)
- """)
- postlude = _inline("""
- import sys
- new = sys.path[sys.__plen:]
- del sys.path[sys.__plen:]
- p = getattr(sys, '__egginsert', 0)
- sys.path[p:p] = new
- sys.__egginsert = p + len(new)
- """)
-
-
-if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite':
- PthDistributions = RewritePthDistributions
-
-
-def _first_line_re():
- """
- Return a regular expression based on first_line_re suitable for matching
- strings.
- """
- if isinstance(first_line_re.pattern, str):
- return first_line_re
-
- # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
- return re.compile(first_line_re.pattern.decode())
-
-
-def auto_chmod(func, arg, exc):
- if func is os.remove and os.name == 'nt':
- chmod(arg, stat.S_IWRITE)
- return func(arg)
- et, ev, _ = sys.exc_info()
- six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
-
-
-def update_dist_caches(dist_path, fix_zipimporter_caches):
- """
- Fix any globally cached `dist_path` related data
-
- `dist_path` should be a path of a newly installed egg distribution (zipped
- or unzipped).
-
- sys.path_importer_cache contains finder objects that have been cached when
- importing data from the original distribution. Any such finders need to be
- cleared since the replacement distribution might be packaged differently,
- e.g. a zipped egg distribution might get replaced with an unzipped egg
- folder or vice versa. Having the old finders cached may then cause Python
- to attempt loading modules from the replacement distribution using an
- incorrect loader.
-
- zipimport.zipimporter objects are Python loaders charged with importing
- data packaged inside zip archives. If stale loaders referencing the
- original distribution, are left behind, they can fail to load modules from
- the replacement distribution. E.g. if an old zipimport.zipimporter instance
- is used to load data from a new zipped egg archive, it may cause the
- operation to attempt to locate the requested data in the wrong location -
- one indicated by the original distribution's zip archive directory
- information. Such an operation may then fail outright, e.g. report having
- read a 'bad local file header', or even worse, it may fail silently &
- return invalid data.
-
- zipimport._zip_directory_cache contains cached zip archive directory
- information for all existing zipimport.zipimporter instances and all such
- instances connected to the same archive share the same cached directory
- information.
-
- If asked, and the underlying Python implementation allows it, we can fix
- all existing zipimport.zipimporter instances instead of having to track
- them down and remove them one by one, by updating their shared cached zip
- archive directory information. This, of course, assumes that the
- replacement distribution is packaged as a zipped egg.
-
- If not asked to fix existing zipimport.zipimporter instances, we still do
- our best to clear any remaining zipimport.zipimporter related cached data
- that might somehow later get used when attempting to load data from the new
- distribution and thus cause such load operations to fail. Note that when
- tracking down such remaining stale data, we can not catch every conceivable
- usage from here, and we clear only those that we know of and have found to
- cause problems if left alive. Any remaining caches should be updated by
- whomever is in charge of maintaining them, i.e. they should be ready to
- handle us replacing their zip archives with new distributions at runtime.
-
- """
- # There are several other known sources of stale zipimport.zipimporter
- # instances that we do not clear here, but might if ever given a reason to
- # do so:
- # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
- # set') may contain distributions which may in turn contain their
- # zipimport.zipimporter loaders.
- # * Several zipimport.zipimporter loaders held by local variables further
- # up the function call stack when running the setuptools installation.
- # * Already loaded modules may have their __loader__ attribute set to the
- # exact loader instance used when importing them. Python 3.4 docs state
- # that this information is intended mostly for introspection and so is
- # not expected to cause us problems.
- normalized_path = normalize_path(dist_path)
- _uncache(normalized_path, sys.path_importer_cache)
- if fix_zipimporter_caches:
- _replace_zip_directory_cache_data(normalized_path)
- else:
- # Here, even though we do not want to fix existing and now stale
- # zipimporter cache information, we still want to remove it. Related to
- # Python's zip archive directory information cache, we clear each of
- # its stale entries in two phases:
- # 1. Clear the entry so attempting to access zip archive information
- # via any existing stale zipimport.zipimporter instances fails.
- # 2. Remove the entry from the cache so any newly constructed
- # zipimport.zipimporter instances do not end up using old stale
- # zip archive directory information.
- # This whole stale data removal step does not seem strictly necessary,
- # but has been left in because it was done before we started replacing
- # the zip archive directory information cache content if possible, and
- # there are no relevant unit tests that we can depend on to tell us if
- # this is really needed.
- _remove_and_clear_zip_directory_cache_data(normalized_path)
-
-
-def _collect_zipimporter_cache_entries(normalized_path, cache):
- """
- Return zipimporter cache entry keys related to a given normalized path.
-
- Alternative path spellings (e.g. those using different character case or
- those using alternative path separators) related to the same path are
- included. Any sub-path entries are included as well, i.e. those
- corresponding to zip archives embedded in other zip archives.
-
- """
- result = []
- prefix_len = len(normalized_path)
- for p in cache:
- np = normalize_path(p)
- if (np.startswith(normalized_path) and
- np[prefix_len:prefix_len + 1] in (os.sep, '')):
- result.append(p)
- return result
-
-
-def _update_zipimporter_cache(normalized_path, cache, updater=None):
- """
- Update zipimporter cache data for a given normalized path.
-
- Any sub-path entries are processed as well, i.e. those corresponding to zip
- archives embedded in other zip archives.
-
- Given updater is a callable taking a cache entry key and the original entry
- (after already removing the entry from the cache), and expected to update
- the entry and possibly return a new one to be inserted in its place.
- Returning None indicates that the entry should not be replaced with a new
- one. If no updater is given, the cache entries are simply removed without
- any additional processing, the same as if the updater simply returned None.
-
- """
- for p in _collect_zipimporter_cache_entries(normalized_path, cache):
- # N.B. pypy's custom zipimport._zip_directory_cache implementation does
- # not support the complete dict interface:
- # * Does not support item assignment, thus not allowing this function
- # to be used only for removing existing cache entries.
- # * Does not support the dict.pop() method, forcing us to use the
- # get/del patterns instead. For more detailed information see the
- # following links:
- # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
- # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
- old_entry = cache[p]
- del cache[p]
- new_entry = updater and updater(p, old_entry)
- if new_entry is not None:
- cache[p] = new_entry
-
-
-def _uncache(normalized_path, cache):
- _update_zipimporter_cache(normalized_path, cache)
-
-
-def _remove_and_clear_zip_directory_cache_data(normalized_path):
- def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
- old_entry.clear()
-
- _update_zipimporter_cache(
- normalized_path, zipimport._zip_directory_cache,
- updater=clear_and_remove_cached_zip_archive_directory_data)
-
-# PyPy Python implementation does not allow directly writing to the
-# zipimport._zip_directory_cache and so prevents us from attempting to correct
-# its content. The best we can do there is clear the problematic cache content
-# and have PyPy repopulate it as needed. The downside is that if there are any
-# stale zipimport.zipimporter instances laying around, attempting to use them
-# will fail due to not having its zip archive directory information available
-# instead of being automatically corrected to use the new correct zip archive
-# directory information.
-if '__pypy__' in sys.builtin_module_names:
- _replace_zip_directory_cache_data = \
- _remove_and_clear_zip_directory_cache_data
-else:
- def _replace_zip_directory_cache_data(normalized_path):
- def replace_cached_zip_archive_directory_data(path, old_entry):
- # N.B. In theory, we could load the zip directory information just
- # once for all updated path spellings, and then copy it locally and
- # update its contained path strings to contain the correct
- # spelling, but that seems like a way too invasive move (this cache
- # structure is not officially documented anywhere and could in
- # theory change with new Python releases) for no significant
- # benefit.
- old_entry.clear()
- zipimport.zipimporter(path)
- old_entry.update(zipimport._zip_directory_cache[path])
- return old_entry
-
- _update_zipimporter_cache(
- normalized_path, zipimport._zip_directory_cache,
- updater=replace_cached_zip_archive_directory_data)
-
-
-def is_python(text, filename='<string>'):
- "Is this string a valid Python script?"
- try:
- compile(text, filename, 'exec')
- except (SyntaxError, TypeError):
- return False
- else:
- return True
-
-
-def is_sh(executable):
- """Determine if the specified executable is a .sh (contains a #! line)"""
- try:
- with io.open(executable, encoding='latin-1') as fp:
- magic = fp.read(2)
- except (OSError, IOError):
- return executable
- return magic == '#!'
-
-
-def nt_quote_arg(arg):
- """Quote a command line argument according to Windows parsing rules"""
- return subprocess.list2cmdline([arg])
-
-
-def is_python_script(script_text, filename):
- """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
- """
- if filename.endswith('.py') or filename.endswith('.pyw'):
- return True # extension says it's Python
- if is_python(script_text, filename):
- return True # it's syntactically valid Python
- if script_text.startswith('#!'):
- # It begins with a '#!' line, so check if 'python' is in it somewhere
- return 'python' in script_text.splitlines()[0].lower()
-
- return False # Not any Python I can recognize
-
-
-try:
- from os import chmod as _chmod
-except ImportError:
- # Jython compatibility
- def _chmod(*args):
- pass
-
-
-def chmod(path, mode):
- log.debug("changing mode of %s to %o", path, mode)
- try:
- _chmod(path, mode)
- except os.error as e:
- log.debug("chmod failed: %s", e)
-
-
-class CommandSpec(list):
- """
- A command spec for a #! header, specified as a list of arguments akin to
- those passed to Popen.
- """
-
- options = []
- split_args = dict()
-
- @classmethod
- def best(cls):
- """
- Choose the best CommandSpec class based on environmental conditions.
- """
- return cls
-
- @classmethod
- def _sys_executable(cls):
- _default = os.path.normpath(sys.executable)
- return os.environ.get('__PYVENV_LAUNCHER__', _default)
-
- @classmethod
- def from_param(cls, param):
- """
- Construct a CommandSpec from a parameter to build_scripts, which may
- be None.
- """
- if isinstance(param, cls):
- return param
- if isinstance(param, list):
- return cls(param)
- if param is None:
- return cls.from_environment()
- # otherwise, assume it's a string.
- return cls.from_string(param)
-
- @classmethod
- def from_environment(cls):
- return cls([cls._sys_executable()])
-
- @classmethod
- def from_string(cls, string):
- """
- Construct a command spec from a simple string representing a command
- line parseable by shlex.split.
- """
- items = shlex.split(string, **cls.split_args)
- return cls(items)
-
- def install_options(self, script_text):
- self.options = shlex.split(self._extract_options(script_text))
- cmdline = subprocess.list2cmdline(self)
- if not isascii(cmdline):
- self.options[:0] = ['-x']
-
- @staticmethod
- def _extract_options(orig_script):
- """
- Extract any options from the first line of the script.
- """
- first = (orig_script + '\n').splitlines()[0]
- match = _first_line_re().match(first)
- options = match.group(1) or '' if match else ''
- return options.strip()
-
- def as_header(self):
- return self._render(self + list(self.options))
-
- @staticmethod
- def _render(items):
- cmdline = subprocess.list2cmdline(items)
- return '#!' + cmdline + '\n'
-
-# For pbr compat; will be removed in a future version.
-sys_executable = CommandSpec._sys_executable()
-
-
-class WindowsCommandSpec(CommandSpec):
- split_args = dict(posix=False)
-
-
-class ScriptWriter(object):
- """
- Encapsulates behavior around writing entry point scripts for console and
- gui apps.
- """
-
- template = textwrap.dedent("""
- # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
- __requires__ = %(spec)r
- import sys
- from pkg_resources import load_entry_point
-
- if __name__ == '__main__':
- sys.exit(
- load_entry_point(%(spec)r, %(group)r, %(name)r)()
- )
- """).lstrip()
-
- command_spec_class = CommandSpec
-
- @classmethod
- def get_script_args(cls, dist, executable=None, wininst=False):
- # for backward compatibility
- warnings.warn("Use get_args", DeprecationWarning)
- writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
- header = cls.get_script_header("", executable, wininst)
- return writer.get_args(dist, header)
-
- @classmethod
- def get_script_header(cls, script_text, executable=None, wininst=False):
- # for backward compatibility
- warnings.warn("Use get_header", DeprecationWarning)
- if wininst:
- executable = "python.exe"
- cmd = cls.command_spec_class.best().from_param(executable)
- cmd.install_options(script_text)
- return cmd.as_header()
-
- @classmethod
- def get_args(cls, dist, header=None):
- """
- Yield write_script() argument tuples for a distribution's
- console_scripts and gui_scripts entry points.
- """
- if header is None:
- header = cls.get_header()
- spec = str(dist.as_requirement())
- for type_ in 'console', 'gui':
- group = type_ + '_scripts'
- for name, ep in dist.get_entry_map(group).items():
- cls._ensure_safe_name(name)
- script_text = cls.template % locals()
- args = cls._get_script_args(type_, name, header, script_text)
- for res in args:
- yield res
-
- @staticmethod
- def _ensure_safe_name(name):
- """
- Prevent paths in *_scripts entry point names.
- """
- has_path_sep = re.search(r'[\\/]', name)
- if has_path_sep:
- raise ValueError("Path separators not allowed in script names")
-
- @classmethod
- def get_writer(cls, force_windows):
- # for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
- return WindowsScriptWriter.best() if force_windows else cls.best()
-
- @classmethod
- def best(cls):
- """
- Select the best ScriptWriter for this environment.
- """
- if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
- return WindowsScriptWriter.best()
- else:
- return cls
-
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- # Simply write the stub with no extension.
- yield (name, header + script_text)
-
- @classmethod
- def get_header(cls, script_text="", executable=None):
- """Create a #! line, getting options (if any) from script_text"""
- cmd = cls.command_spec_class.best().from_param(executable)
- cmd.install_options(script_text)
- return cmd.as_header()
-
-
-class WindowsScriptWriter(ScriptWriter):
- command_spec_class = WindowsCommandSpec
-
- @classmethod
- def get_writer(cls):
- # for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
- return cls.best()
-
- @classmethod
- def best(cls):
- """
- Select the best ScriptWriter suitable for Windows
- """
- writer_lookup = dict(
- executable=WindowsExecutableLauncherWriter,
- natural=cls,
- )
- # for compatibility, use the executable launcher by default
- launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
- return writer_lookup[launcher]
-
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- "For Windows, add a .py extension"
- ext = dict(console='.pya', gui='.pyw')[type_]
- if ext not in os.environ['PATHEXT'].lower().split(';'):
- warnings.warn("%s not listed in PATHEXT; scripts will not be "
- "recognized as executables." % ext, UserWarning)
- old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
- old.remove(ext)
- header = cls._adjust_header(type_, header)
- blockers = [name + x for x in old]
- yield name + ext, header + script_text, 't', blockers
-
- @classmethod
- def _adjust_header(cls, type_, orig_header):
- """
- Make sure 'pythonw' is used for gui and and 'python' is used for
- console (regardless of what sys.executable is).
- """
- pattern = 'pythonw.exe'
- repl = 'python.exe'
- if type_ == 'gui':
- pattern, repl = repl, pattern
- pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
- new_header = pattern_ob.sub(string=orig_header, repl=repl)
- return new_header if cls._use_header(new_header) else orig_header
-
- @staticmethod
- def _use_header(new_header):
- """
- Should _adjust_header use the replaced header?
-
- On non-windows systems, always use. On
- Windows systems, only use the replaced header if it resolves
- to an executable on the system.
- """
- clean_header = new_header[2:-1].strip('"')
- return sys.platform != 'win32' or find_executable(clean_header)
-
-
-class WindowsExecutableLauncherWriter(WindowsScriptWriter):
- @classmethod
- def _get_script_args(cls, type_, name, header, script_text):
- """
- For Windows, add a .py extension and an .exe launcher
- """
- if type_ == 'gui':
- launcher_type = 'gui'
- ext = '-script.pyw'
- old = ['.pyw']
- else:
- launcher_type = 'cli'
- ext = '-script.py'
- old = ['.py', '.pyc', '.pyo']
- hdr = cls._adjust_header(type_, header)
- blockers = [name + x for x in old]
- yield (name + ext, hdr + script_text, 't', blockers)
- yield (
- name + '.exe', get_win_launcher(launcher_type),
- 'b' # write in binary mode
- )
- if not is_64bit():
- # install a manifest for the launcher to prevent Windows
- # from detecting it as an installer (which it will for
- # launchers like easy_install.exe). Consider only
- # adding a manifest for launchers detected as installers.
- # See Distribute #143 for details.
- m_name = name + '.exe.manifest'
- yield (m_name, load_launcher_manifest(name), 't')
-
-
-# for backward-compatibility
-get_script_args = ScriptWriter.get_script_args
-get_script_header = ScriptWriter.get_script_header
-
-
-def get_win_launcher(type):
- """
- Load the Windows launcher (executable) suitable for launching a script.
-
- `type` should be either 'cli' or 'gui'
-
- Returns the executable as a byte string.
- """
- launcher_fn = '%s.exe' % type
- if platform.machine().lower() == 'arm':
- launcher_fn = launcher_fn.replace(".", "-arm.")
- if is_64bit():
- launcher_fn = launcher_fn.replace(".", "-64.")
- else:
- launcher_fn = launcher_fn.replace(".", "-32.")
- return resource_string('setuptools', launcher_fn)
-
-
-def load_launcher_manifest(name):
- manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
- if six.PY2:
- return manifest % vars()
- else:
- return manifest.decode('utf-8') % vars()
-
-
-def rmtree(path, ignore_errors=False, onerror=auto_chmod):
- """Recursively delete a directory tree.
-
- This code is taken from the Python 2.4 version of 'shutil', because
- the 2.3 version doesn't really work right.
- """
- if ignore_errors:
- def onerror(*args):
- pass
- elif onerror is None:
- def onerror(*args):
- raise
- names = []
- try:
- names = os.listdir(path)
- except os.error:
- onerror(os.listdir, path, sys.exc_info())
- for name in names:
- fullname = os.path.join(path, name)
- try:
- mode = os.lstat(fullname).st_mode
- except os.error:
- mode = 0
- if stat.S_ISDIR(mode):
- rmtree(fullname, ignore_errors, onerror)
- else:
- try:
- os.remove(fullname)
- except os.error:
- onerror(os.remove, fullname, sys.exc_info())
- try:
- os.rmdir(path)
- except os.error:
- onerror(os.rmdir, path, sys.exc_info())
-
-
-def current_umask():
- tmp = os.umask(0o022)
- os.umask(tmp)
- return tmp
-
-
-def bootstrap():
- # This function is called when setuptools*.egg is run using /bin/sh
- import setuptools
-
- argv0 = os.path.dirname(setuptools.__path__[0])
- sys.argv[0] = argv0
- sys.argv.append(argv0)
- main()
-
-
-def main(argv=None, **kw):
- from setuptools import setup
- from setuptools.dist import Distribution
-
- class DistributionWithoutHelpCommands(Distribution):
- common_usage = ""
-
- def _show_help(self, *args, **kw):
- with _patch_usage():
- Distribution._show_help(self, *args, **kw)
-
- if argv is None:
- argv = sys.argv[1:]
-
- with _patch_usage():
- setup(
- script_args=['-q', 'easy_install', '-v'] + argv,
- script_name=sys.argv[0] or 'easy_install',
- distclass=DistributionWithoutHelpCommands, **kw
- )
-
-
-@contextlib.contextmanager
-def _patch_usage():
- import distutils.core
- USAGE = textwrap.dedent("""
- usage: %(script)s [options] requirement_or_url ...
- or: %(script)s --help
- """).lstrip()
-
- def gen_usage(script_name):
- return USAGE % dict(
- script=os.path.basename(script_name),
- )
-
- saved = distutils.core.gen_usage
- distutils.core.gen_usage = gen_usage
- try:
- yield
- finally:
- distutils.core.gen_usage = saved
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
deleted file mode 100755
index 8e1502a5..00000000
--- a/setuptools/command/egg_info.py
+++ /dev/null
@@ -1,489 +0,0 @@
-"""setuptools.command.egg_info
-
-Create a distribution's .egg-info directory and contents"""
-
-from distutils.filelist import FileList as _FileList
-from distutils.util import convert_path
-from distutils import log
-import distutils.errors
-import distutils.filelist
-import os
-import re
-import sys
-import io
-import warnings
-import time
-import collections
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
-
-from setuptools import Command
-from setuptools.command.sdist import sdist
-from setuptools.command.sdist import walk_revctrl
-from setuptools.command.setopt import edit_config
-from setuptools.command import bdist_egg
-from pkg_resources import (
- parse_requirements, safe_name, parse_version,
- safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
-import setuptools.unicode_utils as unicode_utils
-
-from pkg_resources.extern import packaging
-
-try:
- from setuptools_svn import svn_utils
-except ImportError:
- pass
-
-
-class egg_info(Command):
- description = "create a distribution's .egg-info directory"
-
- user_options = [
- ('egg-base=', 'e', "directory containing .egg-info directories"
- " (default: top of the source tree)"),
- ('tag-svn-revision', 'r',
- "Add subversion revision ID to version number"),
- ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
- ('tag-build=', 'b', "Specify explicit tag to add to version number"),
- ('no-svn-revision', 'R',
- "Don't add subversion revision ID [default]"),
- ('no-date', 'D', "Don't include date stamp [default]"),
- ]
-
- boolean_options = ['tag-date', 'tag-svn-revision']
- negative_opt = {'no-svn-revision': 'tag-svn-revision',
- 'no-date': 'tag-date'}
-
- def initialize_options(self):
- self.egg_name = None
- self.egg_version = None
- self.egg_base = None
- self.egg_info = None
- self.tag_build = None
- self.tag_svn_revision = 0
- self.tag_date = 0
- self.broken_egg_info = False
- self.vtags = None
-
- def save_version_info(self, filename):
- """
- Materialize the values of svn_revision and date into the
- build tag. Install these keys in a deterministic order
- to avoid arbitrary reordering on subsequent builds.
- """
- # python 2.6 compatibility
- odict = getattr(collections, 'OrderedDict', dict)
- egg_info = odict()
- # follow the order these keys would have been added
- # when PYTHONHASHSEED=0
- egg_info['tag_build'] = self.tags()
- egg_info['tag_date'] = 0
- egg_info['tag_svn_revision'] = 0
- edit_config(filename, dict(egg_info=egg_info))
-
- def finalize_options(self):
- self.egg_name = safe_name(self.distribution.get_name())
- self.vtags = self.tags()
- self.egg_version = self.tagged_version()
-
- parsed_version = parse_version(self.egg_version)
-
- try:
- is_version = isinstance(parsed_version, packaging.version.Version)
- spec = (
- "%s==%s" if is_version else "%s===%s"
- )
- list(
- parse_requirements(spec % (self.egg_name, self.egg_version))
- )
- except ValueError:
- raise distutils.errors.DistutilsOptionError(
- "Invalid distribution name or version syntax: %s-%s" %
- (self.egg_name, self.egg_version)
- )
-
- if self.egg_base is None:
- dirs = self.distribution.package_dir
- self.egg_base = (dirs or {}).get('', os.curdir)
-
- self.ensure_dirname('egg_base')
- self.egg_info = to_filename(self.egg_name) + '.egg-info'
- if self.egg_base != os.curdir:
- self.egg_info = os.path.join(self.egg_base, self.egg_info)
- if '-' in self.egg_name:
- self.check_broken_egg_info()
-
- # Set package version for the benefit of dumber commands
- # (e.g. sdist, bdist_wininst, etc.)
- #
- self.distribution.metadata.version = self.egg_version
-
- # If we bootstrapped around the lack of a PKG-INFO, as might be the
- # case in a fresh checkout, make sure that any special tags get added
- # to the version info
- #
- pd = self.distribution._patched_dist
- if pd is not None and pd.key == self.egg_name.lower():
- pd._version = self.egg_version
- pd._parsed_version = parse_version(self.egg_version)
- self.distribution._patched_dist = None
-
- def write_or_delete_file(self, what, filename, data, force=False):
- """Write `data` to `filename` or delete if empty
-
- If `data` is non-empty, this routine is the same as ``write_file()``.
- If `data` is empty but not ``None``, this is the same as calling
- ``delete_file(filename)`. If `data` is ``None``, then this is a no-op
- unless `filename` exists, in which case a warning is issued about the
- orphaned file (if `force` is false), or deleted (if `force` is true).
- """
- if data:
- self.write_file(what, filename, data)
- elif os.path.exists(filename):
- if data is None and not force:
- log.warn(
- "%s not set in setup(), but %s exists", what, filename
- )
- return
- else:
- self.delete_file(filename)
-
- def write_file(self, what, filename, data):
- """Write `data` to `filename` (if not a dry run) after announcing it
-
- `what` is used in a log message to identify what is being written
- to the file.
- """
- log.info("writing %s to %s", what, filename)
- if six.PY3:
- data = data.encode("utf-8")
- if not self.dry_run:
- f = open(filename, 'wb')
- f.write(data)
- f.close()
-
- def delete_file(self, filename):
- """Delete `filename` (if not a dry run) after announcing it"""
- log.info("deleting %s", filename)
- if not self.dry_run:
- os.unlink(filename)
-
- def tagged_version(self):
- version = self.distribution.get_version()
- # egg_info may be called more than once for a distribution,
- # in which case the version string already contains all tags.
- if self.vtags and version.endswith(self.vtags):
- return safe_version(version)
- return safe_version(version + self.vtags)
-
- def run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in iter_entry_points('egg_info.writers'):
- ep.require(installer=installer)
- writer = ep.resolve()
- writer(self, ep.name, os.path.join(self.egg_info, ep.name))
-
- # Get rid of native_libs.txt if it was put there by older bdist_egg
- nl = os.path.join(self.egg_info, "native_libs.txt")
- if os.path.exists(nl):
- self.delete_file(nl)
-
- self.find_sources()
-
- def tags(self):
- version = ''
- if self.tag_build:
- version += self.tag_build
- if self.tag_svn_revision:
- version += '-r%s' % self.get_svn_revision()
- if self.tag_date:
- version += time.strftime("-%Y%m%d")
- return version
-
- @staticmethod
- def get_svn_revision():
- if 'svn_utils' not in globals():
- return "0"
- return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
-
- def find_sources(self):
- """Generate SOURCES.txt manifest file"""
- manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
- mm = manifest_maker(self.distribution)
- mm.manifest = manifest_filename
- mm.run()
- self.filelist = mm.filelist
-
- def check_broken_egg_info(self):
- bei = self.egg_name + '.egg-info'
- if self.egg_base != os.curdir:
- bei = os.path.join(self.egg_base, bei)
- if os.path.exists(bei):
- log.warn(
- "-" * 78 + '\n'
- "Note: Your current .egg-info directory has a '-' in its name;"
- '\nthis will not work correctly with "setup.py develop".\n\n'
- 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
- bei, self.egg_info
- )
- self.broken_egg_info = self.egg_info
- self.egg_info = bei # make it work for now
-
-
-class FileList(_FileList):
- """File list that accepts only existing, platform-independent paths"""
-
- def append(self, item):
- if item.endswith('\r'): # Fix older sdists built on Windows
- item = item[:-1]
- path = convert_path(item)
-
- if self._safe_path(path):
- self.files.append(path)
-
- def extend(self, paths):
- self.files.extend(filter(self._safe_path, paths))
-
- def _repair(self):
- """
- Replace self.files with only safe paths
-
- Because some owners of FileList manipulate the underlying
- ``files`` attribute directly, this method must be called to
- repair those paths.
- """
- self.files = list(filter(self._safe_path, self.files))
-
- def _safe_path(self, path):
- enc_warn = "'%s' not %s encodable -- skipping"
-
- # To avoid accidental trans-codings errors, first to unicode
- u_path = unicode_utils.filesys_decode(path)
- if u_path is None:
- log.warn("'%s' in unexpected encoding -- skipping" % path)
- return False
-
- # Must ensure utf-8 encodability
- utf8_path = unicode_utils.try_encode(u_path, "utf-8")
- if utf8_path is None:
- log.warn(enc_warn, path, 'utf-8')
- return False
-
- try:
- # accept is either way checks out
- if os.path.exists(u_path) or os.path.exists(utf8_path):
- return True
- # this will catch any encode errors decoding u_path
- except UnicodeEncodeError:
- log.warn(enc_warn, path, sys.getfilesystemencoding())
-
-
-class manifest_maker(sdist):
- template = "MANIFEST.in"
-
- def initialize_options(self):
- self.use_defaults = 1
- self.prune = 1
- self.manifest_only = 1
- self.force_manifest = 1
-
- def finalize_options(self):
- pass
-
- def run(self):
- self.filelist = FileList()
- if not os.path.exists(self.manifest):
- self.write_manifest() # it must exist so it'll get in the list
- self.filelist.findall()
- self.add_defaults()
- if os.path.exists(self.template):
- self.read_template()
- self.prune_file_list()
- self.filelist.sort()
- self.filelist.remove_duplicates()
- self.write_manifest()
-
- def _manifest_normalize(self, path):
- path = unicode_utils.filesys_decode(path)
- return path.replace(os.sep, '/')
-
- def write_manifest(self):
- """
- Write the file list in 'self.filelist' to the manifest file
- named by 'self.manifest'.
- """
- self.filelist._repair()
-
- # Now _repairs should encodability, but not unicode
- files = [self._manifest_normalize(f) for f in self.filelist.files]
- msg = "writing manifest file '%s'" % self.manifest
- self.execute(write_file, (self.manifest, files), msg)
-
- def warn(self, msg): # suppress missing-file warnings from sdist
- if not msg.startswith("standard file not found:"):
- sdist.warn(self, msg)
-
- def add_defaults(self):
- sdist.add_defaults(self)
- self.filelist.append(self.template)
- self.filelist.append(self.manifest)
- rcfiles = list(walk_revctrl())
- if rcfiles:
- self.filelist.extend(rcfiles)
- elif os.path.exists(self.manifest):
- self.read_manifest()
- ei_cmd = self.get_finalized_command('egg_info')
- self._add_egg_info(cmd=ei_cmd)
- self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
- def _add_egg_info(self, cmd):
- """
- Add paths for egg-info files for an external egg-base.
-
- The egg-info files are written to egg-base. If egg-base is
- outside the current working directory, this method
- searchs the egg-base directory for files to include
- in the manifest. Uses distutils.filelist.findall (which is
- really the version monkeypatched in by setuptools/__init__.py)
- to perform the search.
-
- Since findall records relative paths, prefix the returned
- paths with cmd.egg_base, so add_default's include_pattern call
- (which is looking for the absolute cmd.egg_info) will match
- them.
- """
- if cmd.egg_base == os.curdir:
- # egg-info files were already added by something else
- return
-
- discovered = distutils.filelist.findall(cmd.egg_base)
- resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
- self.filelist.allfiles.extend(resolved)
-
- def prune_file_list(self):
- build = self.get_finalized_command('build')
- base_dir = self.distribution.get_fullname()
- self.filelist.exclude_pattern(None, prefix=build.build_base)
- self.filelist.exclude_pattern(None, prefix=base_dir)
- sep = re.escape(os.sep)
- self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
- is_regex=1)
-
-
-def write_file(filename, contents):
- """Create a file with the specified name and write 'contents' (a
- sequence of strings without line terminators) to it.
- """
- contents = "\n".join(contents)
-
- # assuming the contents has been vetted for utf-8 encoding
- contents = contents.encode("utf-8")
-
- with open(filename, "wb") as f: # always write POSIX-style manifest
- f.write(contents)
-
-
-def write_pkg_info(cmd, basename, filename):
- log.info("writing %s", filename)
- if not cmd.dry_run:
- metadata = cmd.distribution.metadata
- metadata.version, oldver = cmd.egg_version, metadata.version
- metadata.name, oldname = cmd.egg_name, metadata.name
- try:
- # write unescaped data to PKG-INFO, so older pkg_resources
- # can still parse it
- metadata.write_pkg_info(cmd.egg_info)
- finally:
- metadata.name, metadata.version = oldname, oldver
-
- safe = getattr(cmd.distribution, 'zip_safe', None)
-
- bdist_egg.write_safety_flag(cmd.egg_info, safe)
-
-
-def warn_depends_obsolete(cmd, basename, filename):
- if os.path.exists(filename):
- log.warn(
- "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
- "Use the install_requires/extras_require setup() args instead."
- )
-
-
-def _write_requirements(stream, reqs):
- lines = yield_lines(reqs or ())
- append_cr = lambda line: line + '\n'
- lines = map(append_cr, lines)
- stream.writelines(lines)
-
-
-def write_requirements(cmd, basename, filename):
- dist = cmd.distribution
- data = six.StringIO()
- _write_requirements(data, dist.install_requires)
- extras_require = dist.extras_require or {}
- for extra in sorted(extras_require):
- data.write('\n[{extra}]\n'.format(**vars()))
- _write_requirements(data, extras_require[extra])
- cmd.write_or_delete_file("requirements", filename, data.getvalue())
-
-
-def write_setup_requirements(cmd, basename, filename):
- data = StringIO()
- _write_requirements(data, cmd.distribution.setup_requires)
- cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
-
-
-def write_toplevel_names(cmd, basename, filename):
- pkgs = dict.fromkeys(
- [
- k.split('.', 1)[0]
- for k in cmd.distribution.iter_distribution_names()
- ]
- )
- cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
-
-
-def overwrite_arg(cmd, basename, filename):
- write_arg(cmd, basename, filename, True)
-
-
-def write_arg(cmd, basename, filename, force=False):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value) + '\n'
- cmd.write_or_delete_file(argname, filename, value, force)
-
-
-def write_entries(cmd, basename, filename):
- ep = cmd.distribution.entry_points
-
- if isinstance(ep, six.string_types) or ep is None:
- data = ep
- elif ep is not None:
- data = []
- for section, contents in sorted(ep.items()):
- if not isinstance(contents, six.string_types):
- contents = EntryPoint.parse_group(section, contents)
- contents = '\n'.join(sorted(map(str, contents.values())))
- data.append('[%s]\n%s\n\n' % (section, contents))
- data = ''.join(data)
-
- cmd.write_or_delete_file('entry points', filename, data, True)
-
-
-def get_pkg_info_revision():
- """
- Get a -r### off of PKG-INFO Version in case this is an sdist of
- a subversion revision.
- """
- warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
- if os.path.exists('PKG-INFO'):
- with io.open('PKG-INFO') as f:
- for line in f:
- match = re.match(r"Version:.*-r(\d+)\s*$", line)
- if match:
- return int(match.group(1))
- return 0
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
deleted file mode 100644
index 31a5ddb5..00000000
--- a/setuptools/command/install.py
+++ /dev/null
@@ -1,125 +0,0 @@
-from distutils.errors import DistutilsArgError
-import inspect
-import glob
-import warnings
-import platform
-import distutils.command.install as orig
-
-import setuptools
-
-# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
-# now. See https://github.com/pypa/setuptools/issues/199/
-_install = orig.install
-
-
-class install(orig.install):
- """Use easy_install to install the package, w/dependencies"""
-
- user_options = orig.install.user_options + [
- ('old-and-unmanageable', None, "Try not to use this!"),
- ('single-version-externally-managed', None,
- "used by system package builders to create 'flat' eggs"),
- ]
- boolean_options = orig.install.boolean_options + [
- 'old-and-unmanageable', 'single-version-externally-managed',
- ]
- new_commands = [
- ('install_egg_info', lambda self: True),
- ('install_scripts', lambda self: True),
- ]
- _nc = dict(new_commands)
-
- def initialize_options(self):
- orig.install.initialize_options(self)
- self.old_and_unmanageable = None
- self.single_version_externally_managed = None
-
- def finalize_options(self):
- orig.install.finalize_options(self)
- if self.root:
- self.single_version_externally_managed = True
- elif self.single_version_externally_managed:
- if not self.root and not self.record:
- raise DistutilsArgError(
- "You must specify --record or --root when building system"
- " packages"
- )
-
- def handle_extra_path(self):
- if self.root or self.single_version_externally_managed:
- # explicit backward-compatibility mode, allow extra_path to work
- return orig.install.handle_extra_path(self)
-
- # Ignore extra_path when installing an egg (or being run by another
- # command without --root or --single-version-externally-managed
- self.path_file = None
- self.extra_dirs = ''
-
- def run(self):
- # Explicit request for old-style install? Just do it
- if self.old_and_unmanageable or self.single_version_externally_managed:
- return orig.install.run(self)
-
- if not self._called_from_setup(inspect.currentframe()):
- # Run in backward-compatibility mode to support bdist_* commands.
- orig.install.run(self)
- else:
- self.do_egg_install()
-
- @staticmethod
- def _called_from_setup(run_frame):
- """
- Attempt to detect whether run() was called from setup() or by another
- command. If called by setup(), the parent caller will be the
- 'run_command' method in 'distutils.dist', and *its* caller will be
- the 'run_commands' method. If called any other way, the
- immediate caller *might* be 'run_command', but it won't have been
- called by 'run_commands'. Return True in that case or if a call stack
- is unavailable. Return False otherwise.
- """
- if run_frame is None:
- msg = "Call stack not available. bdist_* commands may fail."
- warnings.warn(msg)
- if platform.python_implementation() == 'IronPython':
- msg = "For best results, pass -X:Frames to enable call stack."
- warnings.warn(msg)
- return True
- res = inspect.getouterframes(run_frame)[2]
- caller, = res[:1]
- info = inspect.getframeinfo(caller)
- caller_module = caller.f_globals.get('__name__', '')
- return (
- caller_module == 'distutils.dist'
- and info.function == 'run_commands'
- )
-
- def do_egg_install(self):
-
- easy_install = self.distribution.get_command_class('easy_install')
-
- cmd = easy_install(
- self.distribution, args="x", root=self.root, record=self.record,
- )
- cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
- cmd.always_copy_from = '.' # make sure local-dir eggs get installed
-
- # pick up setup-dir .egg files only: no .egg-info
- cmd.package_index.scan(glob.glob('*.egg'))
-
- self.run_command('bdist_egg')
- args = [self.distribution.get_command_obj('bdist_egg').egg_output]
-
- if setuptools.bootstrap_install_from:
- # Bootstrap self-installation of setuptools
- args.insert(0, setuptools.bootstrap_install_from)
-
- cmd.args = args
- cmd.run()
- setuptools.bootstrap_install_from = None
-
-
-# XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = (
- [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
- install.new_commands
-)
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
deleted file mode 100755
index 60b615d2..00000000
--- a/setuptools/command/install_egg_info.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from distutils import log, dir_util
-import os
-
-from setuptools.extern.six.moves import map
-
-from setuptools import Command
-from setuptools.archive_util import unpack_archive
-import pkg_resources
-
-
-class install_egg_info(Command):
- """Install an .egg-info directory for the package"""
-
- description = "Install an .egg-info directory for the package"
-
- user_options = [
- ('install-dir=', 'd', "directory to install to"),
- ]
-
- def initialize_options(self):
- self.install_dir = None
-
- def finalize_options(self):
- self.set_undefined_options('install_lib',
- ('install_dir', 'install_dir'))
- ei_cmd = self.get_finalized_command("egg_info")
- basename = pkg_resources.Distribution(
- None, None, ei_cmd.egg_name, ei_cmd.egg_version
- ).egg_name() + '.egg-info'
- self.source = ei_cmd.egg_info
- self.target = os.path.join(self.install_dir, basename)
- self.outputs = []
-
- def run(self):
- self.run_command('egg_info')
- if os.path.isdir(self.target) and not os.path.islink(self.target):
- dir_util.remove_tree(self.target, dry_run=self.dry_run)
- elif os.path.exists(self.target):
- self.execute(os.unlink, (self.target,), "Removing " + self.target)
- if not self.dry_run:
- pkg_resources.ensure_directory(self.target)
- self.execute(
- self.copytree, (), "Copying %s to %s" % (self.source, self.target)
- )
- self.install_namespaces()
-
- def get_outputs(self):
- return self.outputs
-
- def copytree(self):
- # Copy the .egg-info tree to site-packages
- def skimmer(src, dst):
- # filter out source-control directories; note that 'src' is always
- # a '/'-separated path, regardless of platform. 'dst' is a
- # platform-specific path.
- for skip in '.svn/', 'CVS/':
- if src.startswith(skip) or '/' + skip in src:
- return None
- self.outputs.append(dst)
- log.debug("Copying %s to %s", src, dst)
- return dst
-
- unpack_archive(self.source, self.target, skimmer)
-
- def install_namespaces(self):
- nsp = self._get_all_ns_packages()
- if not nsp:
- return
- filename, ext = os.path.splitext(self.target)
- filename += '-nspkg.pth'
- self.outputs.append(filename)
- log.info("Installing %s", filename)
- lines = map(self._gen_nspkg_line, nsp)
-
- if self.dry_run:
- # always generate the lines, even in dry run
- list(lines)
- return
-
- with open(filename, 'wt') as f:
- f.writelines(lines)
-
- _nspkg_tmpl = (
- "import sys, types, os",
- "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
- "ie = os.path.exists(os.path.join(p,'__init__.py'))",
- "m = not ie and "
- "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
- "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
- "(p not in mp) and mp.append(p)",
- )
- "lines for the namespace installer"
-
- _nspkg_tmpl_multi = (
- 'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
- )
- "additional line(s) when a parent package is indicated"
-
- @classmethod
- def _gen_nspkg_line(cls, pkg):
- # ensure pkg is not a unicode string under Python 2.7
- pkg = str(pkg)
- pth = tuple(pkg.split('.'))
- tmpl_lines = cls._nspkg_tmpl
- parent, sep, child = pkg.rpartition('.')
- if parent:
- tmpl_lines += cls._nspkg_tmpl_multi
- return ';'.join(tmpl_lines) % locals() + '\n'
-
- def _get_all_ns_packages(self):
- """Return sorted list of all package namespaces"""
- nsp = set()
- for pkg in self.distribution.namespace_packages or []:
- pkg = pkg.split('.')
- while pkg:
- nsp.add('.'.join(pkg))
- pkg.pop()
- return sorted(nsp)
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
deleted file mode 100644
index 78fe6891..00000000
--- a/setuptools/command/install_lib.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import os
-import imp
-from itertools import product, starmap
-import distutils.command.install_lib as orig
-
-class install_lib(orig.install_lib):
- """Don't add compiled flags to filenames of non-Python files"""
-
- def run(self):
- self.build()
- outfiles = self.install()
- if outfiles is not None:
- # always compile, in case we have any extension stubs to deal with
- self.byte_compile(outfiles)
-
- def get_exclusions(self):
- """
- Return a collections.Sized collections.Container of paths to be
- excluded for single_version_externally_managed installations.
- """
- all_packages = (
- pkg
- for ns_pkg in self._get_SVEM_NSPs()
- for pkg in self._all_packages(ns_pkg)
- )
-
- excl_specs = product(all_packages, self._gen_exclusion_paths())
- return set(starmap(self._exclude_pkg_path, excl_specs))
-
- def _exclude_pkg_path(self, pkg, exclusion_path):
- """
- Given a package name and exclusion path within that package,
- compute the full exclusion path.
- """
- parts = pkg.split('.') + [exclusion_path]
- return os.path.join(self.install_dir, *parts)
-
- @staticmethod
- def _all_packages(pkg_name):
- """
- >>> list(install_lib._all_packages('foo.bar.baz'))
- ['foo.bar.baz', 'foo.bar', 'foo']
- """
- while pkg_name:
- yield pkg_name
- pkg_name, sep, child = pkg_name.rpartition('.')
-
- def _get_SVEM_NSPs(self):
- """
- Get namespace packages (list) but only for
- single_version_externally_managed installations and empty otherwise.
- """
- # TODO: is it necessary to short-circuit here? i.e. what's the cost
- # if get_finalized_command is called even when namespace_packages is
- # False?
- if not self.distribution.namespace_packages:
- return []
-
- install_cmd = self.get_finalized_command('install')
- svem = install_cmd.single_version_externally_managed
-
- return self.distribution.namespace_packages if svem else []
-
- @staticmethod
- def _gen_exclusion_paths():
- """
- Generate file paths to be excluded for namespace packages (bytecode
- cache files).
- """
- # always exclude the package module itself
- yield '__init__.py'
-
- yield '__init__.pyc'
- yield '__init__.pyo'
-
- if not hasattr(imp, 'get_tag'):
- return
-
- base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
- yield base + '.pyc'
- yield base + '.pyo'
- yield base + '.opt-1.pyc'
- yield base + '.opt-2.pyc'
-
- def copy_tree(
- self, infile, outfile,
- preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
- ):
- assert preserve_mode and preserve_times and not preserve_symlinks
- exclude = self.get_exclusions()
-
- if not exclude:
- return orig.install_lib.copy_tree(self, infile, outfile)
-
- # Exclude namespace package __init__.py* files from the output
-
- from setuptools.archive_util import unpack_directory
- from distutils import log
-
- outfiles = []
-
- def pf(src, dst):
- if dst in exclude:
- log.warn("Skipping installation of %s (namespace package)",
- dst)
- return False
-
- log.info("copying %s -> %s", src, os.path.dirname(dst))
- outfiles.append(dst)
- return dst
-
- unpack_directory(infile, outfile, pf)
- return outfiles
-
- def get_outputs(self):
- outputs = orig.install_lib.get_outputs(self)
- exclude = self.get_exclusions()
- if exclude:
- return [f for f in outputs if f not in exclude]
- return outputs
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
deleted file mode 100755
index be66cb22..00000000
--- a/setuptools/command/install_scripts.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from distutils import log
-import distutils.command.install_scripts as orig
-import os
-
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-
-
-class install_scripts(orig.install_scripts):
- """Do normal script install, plus any egg_info wrapper scripts"""
-
- def initialize_options(self):
- orig.install_scripts.initialize_options(self)
- self.no_ep = False
-
- def run(self):
- import setuptools.command.easy_install as ei
-
- self.run_command("egg_info")
- if self.distribution.scripts:
- orig.install_scripts.run(self) # run first to set up self.outfiles
- else:
- self.outfiles = []
- if self.no_ep:
- # don't install entry point scripts into .egg file!
- return
-
- ei_cmd = self.get_finalized_command("egg_info")
- dist = Distribution(
- ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
- ei_cmd.egg_name, ei_cmd.egg_version,
- )
- bs_cmd = self.get_finalized_command('build_scripts')
- exec_param = getattr(bs_cmd, 'executable', None)
- bw_cmd = self.get_finalized_command("bdist_wininst")
- is_wininst = getattr(bw_cmd, '_is_running', False)
- writer = ei.ScriptWriter
- if is_wininst:
- exec_param = "python.exe"
- writer = ei.WindowsScriptWriter
- # resolve the writer to the environment
- writer = writer.best()
- cmd = writer.command_spec_class.best().from_param(exec_param)
- for args in writer.get_args(dist, cmd.as_header()):
- self.write_script(*args)
-
- def write_script(self, script_name, contents, mode="t", *ignored):
- """Write an executable file to the scripts directory"""
- from setuptools.command.easy_install import chmod, current_umask
-
- log.info("Installing %s script to %s", script_name, self.install_dir)
- target = os.path.join(self.install_dir, script_name)
- self.outfiles.append(target)
-
- mask = current_umask()
- if not self.dry_run:
- ensure_directory(target)
- f = open(target, "w" + mode)
- f.write(contents)
- f.close()
- chmod(target, 0o777 - mask)
diff --git a/setuptools/command/launcher manifest.xml b/setuptools/command/launcher manifest.xml
deleted file mode 100644
index 5972a96d..00000000
--- a/setuptools/command/launcher manifest.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
- <assemblyIdentity version="1.0.0.0"
- processorArchitecture="X86"
- name="%(name)s"
- type="win32"/>
- <!-- Identify the application security requirements. -->
- <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
- <security>
- <requestedPrivileges>
- <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
- </requestedPrivileges>
- </security>
- </trustInfo>
-</assembly>
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
deleted file mode 100755
index 8d6336a1..00000000
--- a/setuptools/command/register.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import distutils.command.register as orig
-
-
-class register(orig.register):
- __doc__ = orig.register.__doc__
-
- def run(self):
- # Make sure that we are using valid current name/version info
- self.run_command('egg_info')
- orig.register.run(self)
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
deleted file mode 100755
index b89353f5..00000000
--- a/setuptools/command/rotate.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import os
-import shutil
-
-from setuptools.extern import six
-
-from setuptools import Command
-
-
-class rotate(Command):
- """Delete older distributions"""
-
- description = "delete older distributions, keeping N newest files"
- user_options = [
- ('match=', 'm', "patterns to match (required)"),
- ('dist-dir=', 'd', "directory where the distributions are"),
- ('keep=', 'k', "number of matching distributions to keep"),
- ]
-
- boolean_options = []
-
- def initialize_options(self):
- self.match = None
- self.dist_dir = None
- self.keep = None
-
- def finalize_options(self):
- if self.match is None:
- raise DistutilsOptionError(
- "Must specify one or more (comma-separated) match patterns "
- "(e.g. '.zip' or '.egg')"
- )
- if self.keep is None:
- raise DistutilsOptionError("Must specify number of files to keep")
- try:
- self.keep = int(self.keep)
- except ValueError:
- raise DistutilsOptionError("--keep must be an integer")
- if isinstance(self.match, six.string_types):
- self.match = [
- convert_path(p.strip()) for p in self.match.split(',')
- ]
- self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
-
- def run(self):
- self.run_command("egg_info")
- from glob import glob
-
- for pattern in self.match:
- pattern = self.distribution.get_name() + '*' + pattern
- files = glob(os.path.join(self.dist_dir, pattern))
- files = [(os.path.getmtime(f), f) for f in files]
- files.sort()
- files.reverse()
-
- log.info("%d file(s) matching %s", len(files), pattern)
- files = files[self.keep:]
- for (t, f) in files:
- log.info("Deleting %s", f)
- if not self.dry_run:
- if os.path.isdir(f):
- shutil.rmtree(f)
- else:
- os.unlink(f)
diff --git a/setuptools/command/saveopts.py b/setuptools/command/saveopts.py
deleted file mode 100755
index 611cec55..00000000
--- a/setuptools/command/saveopts.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from setuptools.command.setopt import edit_config, option_base
-
-
-class saveopts(option_base):
- """Save command-line options to a file"""
-
- description = "save supplied options to setup.cfg or other config file"
-
- def run(self):
- dist = self.distribution
- settings = {}
-
- for cmd in dist.command_options:
-
- if cmd == 'saveopts':
- continue # don't save our own options!
-
- for opt, (src, val) in dist.get_option_dict(cmd).items():
- if src == "command line":
- settings.setdefault(cmd, {})[opt] = val
-
- edit_config(self.filename, settings, self.dry_run)
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
deleted file mode 100755
index 6640d4e3..00000000
--- a/setuptools/command/sdist.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from glob import glob
-from distutils import log
-import distutils.command.sdist as orig
-import os
-import sys
-import io
-
-from setuptools.extern import six
-
-from setuptools.utils import cs_path_exists
-
-import pkg_resources
-
-READMES = 'README', 'README.rst', 'README.txt'
-
-_default_revctrl = list
-
-def walk_revctrl(dirname=''):
- """Find all files under revision control"""
- for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
- for item in ep.load()(dirname):
- yield item
-
-
-class sdist(orig.sdist):
- """Smart sdist that finds anything supported by revision control"""
-
- user_options = [
- ('formats=', None,
- "formats for source distribution (comma-separated list)"),
- ('keep-temp', 'k',
- "keep the distribution tree around after creating " +
- "archive file(s)"),
- ('dist-dir=', 'd',
- "directory to put the source distribution archive(s) in "
- "[default: dist]"),
- ]
-
- negative_opt = {}
-
- def run(self):
- self.run_command('egg_info')
- ei_cmd = self.get_finalized_command('egg_info')
- self.filelist = ei_cmd.filelist
- self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
- self.check_readme()
-
- # Run sub commands
- for cmd_name in self.get_sub_commands():
- self.run_command(cmd_name)
-
- # Call check_metadata only if no 'check' command
- # (distutils <= 2.6)
- import distutils.command
-
- if 'check' not in distutils.command.__all__:
- self.check_metadata()
-
- self.make_distribution()
-
- dist_files = getattr(self.distribution, 'dist_files', [])
- for file in self.archive_files:
- data = ('sdist', '', file)
- if data not in dist_files:
- dist_files.append(data)
-
- def __read_template_hack(self):
- # This grody hack closes the template file (MANIFEST.in) if an
- # exception occurs during read_template.
- # Doing so prevents an error when easy_install attempts to delete the
- # file.
- try:
- orig.sdist.read_template(self)
- except:
- _, _, tb = sys.exc_info()
- tb.tb_next.tb_frame.f_locals['template'].close()
- raise
-
- # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
- # has been fixed, so only override the method if we're using an earlier
- # Python.
- has_leaky_handle = (
- sys.version_info < (2, 7, 2)
- or (3, 0) <= sys.version_info < (3, 1, 4)
- or (3, 2) <= sys.version_info < (3, 2, 1)
- )
- if has_leaky_handle:
- read_template = __read_template_hack
-
- def add_defaults(self):
- standards = [READMES,
- self.distribution.script_name]
- for fn in standards:
- if isinstance(fn, tuple):
- alts = fn
- got_it = 0
- for fn in alts:
- if cs_path_exists(fn):
- got_it = 1
- self.filelist.append(fn)
- break
-
- if not got_it:
- self.warn("standard file not found: should have one of " +
- ', '.join(alts))
- else:
- if cs_path_exists(fn):
- self.filelist.append(fn)
- else:
- self.warn("standard file '%s' not found" % fn)
-
- optional = ['test/test*.py', 'setup.cfg']
- for pattern in optional:
- files = list(filter(cs_path_exists, glob(pattern)))
- if files:
- self.filelist.extend(files)
-
- # getting python files
- if self.distribution.has_pure_modules():
- build_py = self.get_finalized_command('build_py')
- self.filelist.extend(build_py.get_source_files())
- # This functionality is incompatible with include_package_data, and
- # will in fact create an infinite recursion if include_package_data
- # is True. Use of include_package_data will imply that
- # distutils-style automatic handling of package_data is disabled
- if not self.distribution.include_package_data:
- for _, src_dir, _, filenames in build_py.data_files:
- self.filelist.extend([os.path.join(src_dir, filename)
- for filename in filenames])
-
- if self.distribution.has_ext_modules():
- build_ext = self.get_finalized_command('build_ext')
- self.filelist.extend(build_ext.get_source_files())
-
- if self.distribution.has_c_libraries():
- build_clib = self.get_finalized_command('build_clib')
- self.filelist.extend(build_clib.get_source_files())
-
- if self.distribution.has_scripts():
- build_scripts = self.get_finalized_command('build_scripts')
- self.filelist.extend(build_scripts.get_source_files())
-
- def check_readme(self):
- for f in READMES:
- if os.path.exists(f):
- return
- else:
- self.warn(
- "standard file not found: should have one of " +
- ', '.join(READMES)
- )
-
- def make_release_tree(self, base_dir, files):
- orig.sdist.make_release_tree(self, base_dir, files)
-
- # Save any egg_info command line options used to create this sdist
- dest = os.path.join(base_dir, 'setup.cfg')
- if hasattr(os, 'link') and os.path.exists(dest):
- # unlink and re-copy, since it might be hard-linked, and
- # we don't want to change the source version
- os.unlink(dest)
- self.copy_file('setup.cfg', dest)
-
- self.get_finalized_command('egg_info').save_version_info(dest)
-
- def _manifest_is_not_generated(self):
- # check for special comment used in 2.7.1 and higher
- if not os.path.isfile(self.manifest):
- return False
-
- with io.open(self.manifest, 'rb') as fp:
- first_line = fp.readline()
- return (first_line !=
- '# file GENERATED by distutils, do NOT edit\n'.encode())
-
- def read_manifest(self):
- """Read the manifest file (named by 'self.manifest') and use it to
- fill in 'self.filelist', the list of files to include in the source
- distribution.
- """
- log.info("reading manifest file '%s'", self.manifest)
- manifest = open(self.manifest, 'rbU')
- for line in manifest:
- # The manifest must contain UTF-8. See #303.
- if six.PY3:
- try:
- line = line.decode('UTF-8')
- except UnicodeDecodeError:
- log.warn("%r not UTF-8 decodable -- skipping" % line)
- continue
- # ignore comments and blank lines
- line = line.strip()
- if line.startswith('#') or not line:
- continue
- self.filelist.append(line)
- manifest.close()
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
deleted file mode 100755
index 7f332be5..00000000
--- a/setuptools/command/setopt.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import distutils
-import os
-
-from setuptools.extern.six.moves import configparser
-
-from setuptools import Command
-
-
-__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
-
-
-def config_file(kind="local"):
- """Get the filename of the distutils, local, global, or per-user config
-
- `kind` must be one of "local", "global", or "user"
- """
- if kind == 'local':
- return 'setup.cfg'
- if kind == 'global':
- return os.path.join(
- os.path.dirname(distutils.__file__), 'distutils.cfg'
- )
- if kind == 'user':
- dot = os.name == 'posix' and '.' or ''
- return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
- raise ValueError(
- "config_file() type must be 'local', 'global', or 'user'", kind
- )
-
-
-def edit_config(filename, settings, dry_run=False):
- """Edit a configuration file to include `settings`
-
- `settings` is a dictionary of dictionaries or ``None`` values, keyed by
- command/section name. A ``None`` value means to delete the entire section,
- while a dictionary lists settings to be changed or deleted in that section.
- A setting of ``None`` means to delete that setting.
- """
- log.debug("Reading configuration from %s", filename)
- opts = configparser.RawConfigParser()
- opts.read([filename])
- for section, options in settings.items():
- if options is None:
- log.info("Deleting section [%s] from %s", section, filename)
- opts.remove_section(section)
- else:
- if not opts.has_section(section):
- log.debug("Adding new section [%s] to %s", section, filename)
- opts.add_section(section)
- for option, value in options.items():
- if value is None:
- log.debug(
- "Deleting %s.%s from %s",
- section, option, filename
- )
- opts.remove_option(section, option)
- if not opts.options(section):
- log.info("Deleting empty [%s] section from %s",
- section, filename)
- opts.remove_section(section)
- else:
- log.debug(
- "Setting %s.%s to %r in %s",
- section, option, value, filename
- )
- opts.set(section, option, value)
-
- log.info("Writing %s", filename)
- if not dry_run:
- with open(filename, 'w') as f:
- opts.write(f)
-
-
-class option_base(Command):
- """Abstract base class for commands that mess with config files"""
-
- user_options = [
- ('global-config', 'g',
- "save options to the site-wide distutils.cfg file"),
- ('user-config', 'u',
- "save options to the current user's pydistutils.cfg file"),
- ('filename=', 'f',
- "configuration file to use (default=setup.cfg)"),
- ]
-
- boolean_options = [
- 'global-config', 'user-config',
- ]
-
- def initialize_options(self):
- self.global_config = None
- self.user_config = None
- self.filename = None
-
- def finalize_options(self):
- filenames = []
- if self.global_config:
- filenames.append(config_file('global'))
- if self.user_config:
- filenames.append(config_file('user'))
- if self.filename is not None:
- filenames.append(self.filename)
- if not filenames:
- filenames.append(config_file('local'))
- if len(filenames) > 1:
- raise DistutilsOptionError(
- "Must specify only one configuration file option",
- filenames
- )
- self.filename, = filenames
-
-
-class setopt(option_base):
- """Save command-line options to a file"""
-
- description = "set an option in setup.cfg or another config file"
-
- user_options = [
- ('command=', 'c', 'command to set an option for'),
- ('option=', 'o', 'option to set'),
- ('set-value=', 's', 'value of the option'),
- ('remove', 'r', 'remove (unset) the value'),
- ] + option_base.user_options
-
- boolean_options = option_base.boolean_options + ['remove']
-
- def initialize_options(self):
- option_base.initialize_options(self)
- self.command = None
- self.option = None
- self.set_value = None
- self.remove = None
-
- def finalize_options(self):
- option_base.finalize_options(self)
- if self.command is None or self.option is None:
- raise DistutilsOptionError("Must specify --command *and* --option")
- if self.set_value is None and not self.remove:
- raise DistutilsOptionError("Must specify --set-value or --remove")
-
- def run(self):
- edit_config(
- self.filename, {
- self.command: {self.option.replace('-', '_'): self.set_value}
- },
- self.dry_run
- )
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
deleted file mode 100644
index 371e913b..00000000
--- a/setuptools/command/test.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from distutils.errors import DistutilsOptionError
-from unittest import TestLoader
-import sys
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
-
-from pkg_resources import (resource_listdir, resource_exists, normalize_path,
- working_set, _namespace_packages,
- add_activation_listener, require, EntryPoint)
-from setuptools import Command
-from setuptools.py31compat import unittest_main
-
-
-class ScanningLoader(TestLoader):
- def loadTestsFromModule(self, module, pattern=None):
- """Return a suite of all tests cases contained in the given module
-
- If the module is a package, load tests from all the modules in it.
- If the module has an ``additional_tests`` function, call it and add
- the return value to the tests.
- """
- tests = []
- tests.append(TestLoader.loadTestsFromModule(self, module))
-
- if hasattr(module, "additional_tests"):
- tests.append(module.additional_tests())
-
- if hasattr(module, '__path__'):
- for file in resource_listdir(module.__name__, ''):
- if file.endswith('.py') and file != '__init__.py':
- submodule = module.__name__ + '.' + file[:-3]
- else:
- if resource_exists(module.__name__, file + '/__init__.py'):
- submodule = module.__name__ + '.' + file
- else:
- continue
- tests.append(self.loadTestsFromName(submodule))
-
- if len(tests) != 1:
- return self.suiteClass(tests)
- else:
- return tests[0] # don't create a nested suite for only one return
-
-
-# adapted from jaraco.classes.properties:NonDataProperty
-class NonDataProperty(object):
- def __init__(self, fget):
- self.fget = fget
-
- def __get__(self, obj, objtype=None):
- if obj is None:
- return self
- return self.fget(obj)
-
-
-class test(Command):
- """Command to run unit tests after in-place build"""
-
- description = "run unit tests after in-place build"
-
- user_options = [
- ('test-module=', 'm', "Run 'test_suite' in specified module"),
- ('test-suite=', 's',
- "Test suite to run (e.g. 'some_module.test_suite')"),
- ('test-runner=', 'r', "Test runner to use"),
- ]
-
- def initialize_options(self):
- self.test_suite = None
- self.test_module = None
- self.test_loader = None
- self.test_runner = None
-
- def finalize_options(self):
-
- if self.test_suite and self.test_module:
- msg = "You may specify a module or a suite, but not both"
- raise DistutilsOptionError(msg)
-
- if self.test_suite is None:
- if self.test_module is None:
- self.test_suite = self.distribution.test_suite
- else:
- self.test_suite = self.test_module + ".test_suite"
-
- if self.test_loader is None:
- self.test_loader = getattr(self.distribution, 'test_loader', None)
- if self.test_loader is None:
- self.test_loader = "setuptools.command.test:ScanningLoader"
- if self.test_runner is None:
- self.test_runner = getattr(self.distribution, 'test_runner', None)
-
- @NonDataProperty
- def test_args(self):
- return list(self._test_args())
-
- def _test_args(self):
- if self.verbose:
- yield '--verbose'
- if self.test_suite:
- yield self.test_suite
-
- def with_project_on_sys_path(self, func):
- with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
-
- if with_2to3:
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
-
- ei_cmd = self.get_finalized_command("egg_info")
-
- old_path = sys.path[:]
- old_modules = sys.modules.copy()
-
- try:
- sys.path.insert(0, normalize_path(ei_cmd.egg_base))
- working_set.__init__()
- add_activation_listener(lambda dist: dist.activate())
- require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
- func()
- finally:
- sys.path[:] = old_path
- sys.modules.clear()
- sys.modules.update(old_modules)
- working_set.__init__()
-
- def run(self):
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
- cmd = ' '.join(self._argv)
- if self.dry_run:
- self.announce('skipping "%s" (dry run)' % cmd)
- else:
- self.announce('running "%s"' % cmd)
- self.with_project_on_sys_path(self.run_tests)
-
- def run_tests(self):
- # Purge modules under test from sys.modules. The test loader will
- # re-import them from the build location. Required when 2to3 is used
- # with namespace packages.
- if six.PY3 and getattr(self.distribution, 'use_2to3', False):
- module = self.test_suite.split('.')[0]
- if module in _namespace_packages:
- del_modules = []
- if module in sys.modules:
- del_modules.append(module)
- module += '.'
- for name in sys.modules:
- if name.startswith(module):
- del_modules.append(name)
- list(map(sys.modules.__delitem__, del_modules))
-
- unittest_main(
- None, None, self._argv,
- testLoader=self._resolve_as_ep(self.test_loader),
- testRunner=self._resolve_as_ep(self.test_runner),
- )
-
- @property
- def _argv(self):
- return ['unittest'] + self.test_args
-
- @staticmethod
- def _resolve_as_ep(val):
- """
- Load the indicated attribute value, called, as a as if it were
- specified as an entry point.
- """
- if val is None:
- return
- parsed = EntryPoint.parse("x=" + val)
- return parsed.resolve()()
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
deleted file mode 100644
index 484baa5a..00000000
--- a/setuptools/command/upload.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import getpass
-from distutils.command import upload as orig
-
-
-class upload(orig.upload):
- """
- Override default upload behavior to obtain password
- in a variety of different ways.
- """
-
- def finalize_options(self):
- orig.upload.finalize_options(self)
- # Attempt to obtain password. Short circuit evaluation at the first
- # sign of success.
- self.password = (
- self.password or
- self._load_password_from_keyring() or
- self._prompt_for_password()
- )
-
- def _load_password_from_keyring(self):
- """
- Attempt to load password from keyring. Suppress Exceptions.
- """
- try:
- keyring = __import__('keyring')
- return keyring.get_password(self.repository, self.username)
- except Exception:
- pass
-
- def _prompt_for_password(self):
- """
- Prompt for a password on the tty. Suppress Exceptions.
- """
- try:
- return getpass.getpass()
- except (Exception, KeyboardInterrupt):
- pass
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
deleted file mode 100644
index f887b47e..00000000
--- a/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# -*- coding: utf-8 -*-
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-PyPI's pythonhosted.org).
-"""
-
-from base64 import standard_b64encode
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import os
-import socket
-import zipfile
-import tempfile
-import shutil
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import http_client, urllib
-
-from pkg_resources import iter_entry_points
-from .upload import upload
-
-
-errors = 'surrogateescape' if six.PY3 else 'strict'
-
-
-# This is not just a replacement for byte literals
-# but works as a general purpose encoder
-def b(s, encoding='utf-8'):
- if isinstance(s, six.text_type):
- return s.encode(encoding, errors)
- return s
-
-
-class upload_docs(upload):
- description = 'Upload documentation to PyPI'
-
- user_options = [
- ('repository=', 'r',
- "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
- ('show-response', None,
- 'display full response text from server'),
- ('upload-dir=', None, 'directory to upload'),
- ]
- boolean_options = upload.boolean_options
-
- def has_sphinx(self):
- if self.upload_dir is None:
- for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
- return True
-
- sub_commands = [('build_sphinx', has_sphinx)]
-
- def initialize_options(self):
- upload.initialize_options(self)
- self.upload_dir = None
- self.target_dir = None
-
- def finalize_options(self):
- upload.finalize_options(self)
- if self.upload_dir is None:
- if self.has_sphinx():
- build_sphinx = self.get_finalized_command('build_sphinx')
- self.target_dir = build_sphinx.builder_target_dir
- else:
- build = self.get_finalized_command('build')
- self.target_dir = os.path.join(build.build_base, 'docs')
- else:
- self.ensure_dirname('upload_dir')
- self.target_dir = self.upload_dir
- self.announce('Using upload directory %s' % self.target_dir)
-
- def create_zipfile(self, filename):
- zip_file = zipfile.ZipFile(filename, "w")
- try:
- self.mkpath(self.target_dir) # just in case
- for root, dirs, files in os.walk(self.target_dir):
- if root == self.target_dir and not files:
- raise DistutilsOptionError(
- "no files found in upload directory '%s'"
- % self.target_dir)
- for name in files:
- full = os.path.join(root, name)
- relative = root[len(self.target_dir):].lstrip(os.path.sep)
- dest = os.path.join(relative, name)
- zip_file.write(full, dest)
- finally:
- zip_file.close()
-
- def run(self):
- # Run sub commands
- for cmd_name in self.get_sub_commands():
- self.run_command(cmd_name)
-
- tmp_dir = tempfile.mkdtemp()
- name = self.distribution.metadata.get_name()
- zip_file = os.path.join(tmp_dir, "%s.zip" % name)
- try:
- self.create_zipfile(zip_file)
- self.upload_file(zip_file)
- finally:
- shutil.rmtree(tmp_dir)
-
- def upload_file(self, filename):
- f = open(filename, 'rb')
- content = f.read()
- f.close()
- meta = self.distribution.metadata
- data = {
- ':action': 'doc_upload',
- 'name': meta.get_name(),
- 'content': (os.path.basename(filename), content),
- }
- # set up the authentication
- credentials = b(self.username + ':' + self.password)
- credentials = standard_b64encode(credentials)
- if six.PY3:
- credentials = credentials.decode('ascii')
- auth = "Basic " + credentials
-
- # Build up the MIME payload for the POST data
- boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
- sep_boundary = b('\n--') + b(boundary)
- end_boundary = sep_boundary + b('--')
- body = []
- for key, values in six.iteritems(data):
- title = '\nContent-Disposition: form-data; name="%s"' % key
- # handle multiple entries for the same name
- if not isinstance(values, list):
- values = [values]
- for value in values:
- if type(value) is tuple:
- title += '; filename="%s"' % value[0]
- value = value[1]
- else:
- value = b(value)
- body.append(sep_boundary)
- body.append(b(title))
- body.append(b("\n\n"))
- body.append(value)
- if value and value[-1:] == b('\r'):
- body.append(b('\n')) # write an extra newline (lurve Macs)
- body.append(end_boundary)
- body.append(b("\n"))
- body = b('').join(body)
-
- self.announce("Submitting documentation to %s" % (self.repository),
- log.INFO)
-
- # build the Request
- # We can't use urllib2 since we need to send the Basic
- # auth right with the first request
- schema, netloc, url, params, query, fragments = \
- urllib.parse.urlparse(self.repository)
- assert not params and not query and not fragments
- if schema == 'http':
- conn = http_client.HTTPConnection(netloc)
- elif schema == 'https':
- conn = http_client.HTTPSConnection(netloc)
- else:
- raise AssertionError("unsupported schema " + schema)
-
- data = ''
- try:
- conn.connect()
- conn.putrequest("POST", url)
- content_type = 'multipart/form-data; boundary=%s' % boundary
- conn.putheader('Content-type', content_type)
- conn.putheader('Content-length', str(len(body)))
- conn.putheader('Authorization', auth)
- conn.endheaders()
- conn.send(body)
- except socket.error as e:
- self.announce(str(e), log.ERROR)
- return
-
- r = conn.getresponse()
- if r.status == 200:
- self.announce('Server response (%s): %s' % (r.status, r.reason),
- log.INFO)
- elif r.status == 301:
- location = r.getheader('Location')
- if location is None:
- location = 'https://pythonhosted.org/%s/' % meta.get_name()
- self.announce('Upload successful. Visit %s' % location,
- log.INFO)
- else:
- self.announce('Upload failed (%s): %s' % (r.status, r.reason),
- log.ERROR)
- if self.show_response:
- print('-' * 75, r.read(), '-' * 75)
diff --git a/setuptools/depends.py b/setuptools/depends.py
deleted file mode 100644
index 9f7c9a35..00000000
--- a/setuptools/depends.py
+++ /dev/null
@@ -1,217 +0,0 @@
-import sys
-import imp
-import marshal
-from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
-from distutils.version import StrictVersion
-
-from setuptools.extern import six
-
-__all__ = [
- 'Require', 'find_module', 'get_module_constant', 'extract_constant'
-]
-
-class Require:
- """A prerequisite to building or installing a distribution"""
-
- def __init__(self, name, requested_version, module, homepage='',
- attribute=None, format=None):
-
- if format is None and requested_version is not None:
- format = StrictVersion
-
- if format is not None:
- requested_version = format(requested_version)
- if attribute is None:
- attribute = '__version__'
-
- self.__dict__.update(locals())
- del self.self
-
- def full_name(self):
- """Return full package/distribution name, w/version"""
- if self.requested_version is not None:
- return '%s-%s' % (self.name,self.requested_version)
- return self.name
-
- def version_ok(self, version):
- """Is 'version' sufficiently up-to-date?"""
- return self.attribute is None or self.format is None or \
- str(version) != "unknown" and version >= self.requested_version
-
- def get_version(self, paths=None, default="unknown"):
-
- """Get version number of installed module, 'None', or 'default'
-
- Search 'paths' for module. If not found, return 'None'. If found,
- return the extracted version attribute, or 'default' if no version
- attribute was specified, or the value cannot be determined without
- importing the module. The version is formatted according to the
- requirement's version format (if any), unless it is 'None' or the
- supplied 'default'.
- """
-
- if self.attribute is None:
- try:
- f,p,i = find_module(self.module,paths)
- if f: f.close()
- return default
- except ImportError:
- return None
-
- v = get_module_constant(self.module, self.attribute, default, paths)
-
- if v is not None and v is not default and self.format is not None:
- return self.format(v)
-
- return v
-
- def is_present(self, paths=None):
- """Return true if dependency is present on 'paths'"""
- return self.get_version(paths) is not None
-
- def is_current(self, paths=None):
- """Return true if dependency is present and up-to-date on 'paths'"""
- version = self.get_version(paths)
- if version is None:
- return False
- return self.version_ok(version)
-
-
-def _iter_code(code):
-
- """Yield '(op,arg)' pair for each operation in code object 'code'"""
-
- from array import array
- from dis import HAVE_ARGUMENT, EXTENDED_ARG
-
- bytes = array('b',code.co_code)
- eof = len(code.co_code)
-
- ptr = 0
- extended_arg = 0
-
- while ptr<eof:
-
- op = bytes[ptr]
-
- if op>=HAVE_ARGUMENT:
-
- arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
- ptr += 3
-
- if op==EXTENDED_ARG:
- long_type = six.integer_types[-1]
- extended_arg = arg * long_type(65536)
- continue
-
- else:
- arg = None
- ptr += 1
-
- yield op,arg
-
-
-def find_module(module, paths=None):
- """Just like 'imp.find_module()', but with package support"""
-
- parts = module.split('.')
-
- while parts:
- part = parts.pop(0)
- f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
-
- if kind==PKG_DIRECTORY:
- parts = parts or ['__init__']
- paths = [path]
-
- elif parts:
- raise ImportError("Can't find %r in %s" % (parts,module))
-
- return info
-
-
-def get_module_constant(module, symbol, default=-1, paths=None):
-
- """Find 'module' by searching 'paths', and extract 'symbol'
-
- Return 'None' if 'module' does not exist on 'paths', or it does not define
- 'symbol'. If the module defines 'symbol' as a constant, return the
- constant. Otherwise, return 'default'."""
-
- try:
- f, path, (suffix, mode, kind) = find_module(module, paths)
- except ImportError:
- # Module doesn't exist
- return None
-
- try:
- if kind==PY_COMPILED:
- f.read(8) # skip magic & date
- code = marshal.load(f)
- elif kind==PY_FROZEN:
- code = imp.get_frozen_object(module)
- elif kind==PY_SOURCE:
- code = compile(f.read(), path, 'exec')
- else:
- # Not something we can parse; we'll have to import it. :(
- if module not in sys.modules:
- imp.load_module(module, f, path, (suffix, mode, kind))
- return getattr(sys.modules[module], symbol, None)
-
- finally:
- if f:
- f.close()
-
- return extract_constant(code, symbol, default)
-
-
-def extract_constant(code, symbol, default=-1):
- """Extract the constant value of 'symbol' from 'code'
-
- If the name 'symbol' is bound to a constant value by the Python code
- object 'code', return that value. If 'symbol' is bound to an expression,
- return 'default'. Otherwise, return 'None'.
-
- Return value is based on the first assignment to 'symbol'. 'symbol' must
- be a global, or at least a non-"fast" local in the code block. That is,
- only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
- must be present in 'code.co_names'.
- """
-
- if symbol not in code.co_names:
- # name's not there, can't possibly be an assigment
- return None
-
- name_idx = list(code.co_names).index(symbol)
-
- STORE_NAME = 90
- STORE_GLOBAL = 97
- LOAD_CONST = 100
-
- const = default
-
- for op, arg in _iter_code(code):
-
- if op==LOAD_CONST:
- const = code.co_consts[arg]
- elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
- return const
- else:
- const = default
-
-
-def _update_globals():
- """
- Patch the globals to remove the objects not available on some platforms.
-
- XXX it'd be better to test assertions about bytecode instead.
- """
-
- if not sys.platform.startswith('java') and sys.platform != 'cli':
- return
- incompatible = 'extract_constant', 'get_module_constant'
- for name in incompatible:
- del globals()[name]
- __all__.remove(name)
-
-_update_globals()
diff --git a/setuptools/dist.py b/setuptools/dist.py
deleted file mode 100644
index 086e0a58..00000000
--- a/setuptools/dist.py
+++ /dev/null
@@ -1,872 +0,0 @@
-__all__ = ['Distribution']
-
-import re
-import os
-import sys
-import warnings
-import numbers
-import distutils.log
-import distutils.core
-import distutils.cmd
-import distutils.dist
-from distutils.core import Distribution as _Distribution
-from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
- DistutilsSetupError)
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
-from pkg_resources.extern import packaging
-
-from setuptools.depends import Require
-from setuptools import windows_support
-import pkg_resources
-
-
-def _get_unpatched(cls):
- """Protect against re-patching the distutils if reloaded
-
- Also ensures that no other distutils extension monkeypatched the distutils
- first.
- """
- while cls.__module__.startswith('setuptools'):
- cls, = cls.__bases__
- if not cls.__module__.startswith('distutils'):
- raise AssertionError(
- "distutils has already been patched by %r" % cls
- )
- return cls
-
-_Distribution = _get_unpatched(_Distribution)
-
-def _patch_distribution_metadata_write_pkg_info():
- """
- Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
- encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
- correct this undesirable behavior.
- """
- environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
- if not environment_local:
- return
-
- # from Python 3.4
- def write_pkg_info(self, base_dir):
- """Write the PKG-INFO file into the release tree.
- """
- with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
- encoding='UTF-8') as pkg_info:
- self.write_pkg_file(pkg_info)
-
- distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info
-_patch_distribution_metadata_write_pkg_info()
-
-sequence = tuple, list
-
-def check_importable(dist, attr, value):
- try:
- ep = pkg_resources.EntryPoint.parse('x='+value)
- assert not ep.extras
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be importable 'module:attrs' string (got %r)"
- % (attr,value)
- )
-
-
-def assert_string_list(dist, attr, value):
- """Verify that value is a string list or None"""
- try:
- assert ''.join(value)!=value
- except (TypeError,ValueError,AttributeError,AssertionError):
- raise DistutilsSetupError(
- "%r must be a list of strings (got %r)" % (attr,value)
- )
-def check_nsp(dist, attr, value):
- """Verify that namespace packages are valid"""
- assert_string_list(dist,attr,value)
- for nsp in value:
- if not dist.has_contents_for(nsp):
- raise DistutilsSetupError(
- "Distribution contains no modules or packages for " +
- "namespace package %r" % nsp
- )
- if '.' in nsp:
- parent = '.'.join(nsp.split('.')[:-1])
- if parent not in value:
- distutils.log.warn(
- "WARNING: %r is declared as a package namespace, but %r"
- " is not: please correct this in setup.py", nsp, parent
- )
-
-def check_extras(dist, attr, value):
- """Verify that extras_require mapping is valid"""
- try:
- for k,v in value.items():
- if ':' in k:
- k,m = k.split(':',1)
- if pkg_resources.invalid_marker(m):
- raise DistutilsSetupError("Invalid environment marker: "+m)
- list(pkg_resources.parse_requirements(v))
- except (TypeError,ValueError,AttributeError):
- raise DistutilsSetupError(
- "'extras_require' must be a dictionary whose values are "
- "strings or lists of strings containing valid project/version "
- "requirement specifiers."
- )
-
-def assert_bool(dist, attr, value):
- """Verify that value is True, False, 0, or 1"""
- if bool(value) != value:
- tmpl = "{attr!r} must be a boolean value (got {value!r})"
- raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
-
-
-def check_requirements(dist, attr, value):
- """Verify that install_requires is a valid requirements list"""
- try:
- list(pkg_resources.parse_requirements(value))
- except (TypeError, ValueError) as error:
- tmpl = (
- "{attr!r} must be a string or list of strings "
- "containing valid project/version requirement specifiers; {error}"
- )
- raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
-
-def check_entry_points(dist, attr, value):
- """Verify that entry_points map is parseable"""
- try:
- pkg_resources.EntryPoint.parse_map(value)
- except ValueError as e:
- raise DistutilsSetupError(e)
-
-def check_test_suite(dist, attr, value):
- if not isinstance(value, six.string_types):
- raise DistutilsSetupError("test_suite must be a string")
-
-def check_package_data(dist, attr, value):
- """Verify that value is a dictionary of package names to glob lists"""
- if isinstance(value,dict):
- for k,v in value.items():
- if not isinstance(k,str): break
- try: iter(v)
- except TypeError:
- break
- else:
- return
- raise DistutilsSetupError(
- attr+" must be a dictionary mapping package names to lists of "
- "wildcard patterns"
- )
-
-def check_packages(dist, attr, value):
- for pkgname in value:
- if not re.match(r'\w+(\.\w+)*', pkgname):
- distutils.log.warn(
- "WARNING: %r not a valid package name; please use only "
- ".-separated package names in setup.py", pkgname
- )
-
-
-class Distribution(_Distribution):
- """Distribution with support for features, tests, and package data
-
- This is an enhanced version of 'distutils.dist.Distribution' that
- effectively adds the following new optional keyword arguments to 'setup()':
-
- 'install_requires' -- a string or sequence of strings specifying project
- versions that the distribution requires when installed, in the format
- used by 'pkg_resources.require()'. They will be installed
- automatically when the package is installed. If you wish to use
- packages that are not available in PyPI, or want to give your users an
- alternate download location, you can add a 'find_links' option to the
- '[easy_install]' section of your project's 'setup.cfg' file, and then
- setuptools will scan the listed web pages for links that satisfy the
- requirements.
-
- 'extras_require' -- a dictionary mapping names of optional "extras" to the
- additional requirement(s) that using those extras incurs. For example,
- this::
-
- extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
-
- indicates that the distribution can optionally provide an extra
- capability called "reST", but it can only be used if docutils and
- reSTedit are installed. If the user installs your package using
- EasyInstall and requests one of your extras, the corresponding
- additional requirements will be installed if needed.
-
- 'features' **deprecated** -- a dictionary mapping option names to
- 'setuptools.Feature'
- objects. Features are a portion of the distribution that can be
- included or excluded based on user options, inter-feature dependencies,
- and availability on the current system. Excluded features are omitted
- from all setup commands, including source and binary distributions, so
- you can create multiple distributions from the same source tree.
- Feature names should be valid Python identifiers, except that they may
- contain the '-' (minus) sign. Features can be included or excluded
- via the command line options '--with-X' and '--without-X', where 'X' is
- the name of the feature. Whether a feature is included by default, and
- whether you are allowed to control this from the command line, is
- determined by the Feature object. See the 'Feature' class for more
- information.
-
- 'test_suite' -- the name of a test suite to run for the 'test' command.
- If the user runs 'python setup.py test', the package will be installed,
- and the named test suite will be run. The format is the same as
- would be used on a 'unittest.py' command line. That is, it is the
- dotted name of an object to import and call to generate a test suite.
-
- 'package_data' -- a dictionary mapping package names to lists of filenames
- or globs to use to find data files contained in the named packages.
- If the dictionary has filenames or globs listed under '""' (the empty
- string), those names will be searched for in every package, in addition
- to any names for the specific package. Data files found using these
- names/globs will be installed along with the package, in the same
- location as the package. Note that globs are allowed to reference
- the contents of non-package subdirectories, as long as you use '/' as
- a path separator. (Globs are automatically converted to
- platform-specific paths at runtime.)
-
- In addition to these new keywords, this class also has several new methods
- for manipulating the distribution's contents. For example, the 'include()'
- and 'exclude()' methods can be thought of as in-place add and subtract
- commands that add or remove packages, modules, extensions, and so on from
- the distribution. They are used by the feature subsystem to configure the
- distribution for the included and excluded features.
- """
-
- _patched_dist = None
-
- def patch_missing_pkg_info(self, attrs):
- # Fake up a replacement for the data that would normally come from
- # PKG-INFO, but which might not yet be built if this is a fresh
- # checkout.
- #
- if not attrs or 'name' not in attrs or 'version' not in attrs:
- return
- key = pkg_resources.safe_name(str(attrs['name'])).lower()
- dist = pkg_resources.working_set.by_key.get(key)
- if dist is not None and not dist.has_metadata('PKG-INFO'):
- dist._version = pkg_resources.safe_version(str(attrs['version']))
- self._patched_dist = dist
-
- def __init__(self, attrs=None):
- have_package_data = hasattr(self, "package_data")
- if not have_package_data:
- self.package_data = {}
- _attrs_dict = attrs or {}
- if 'features' in _attrs_dict or 'require_features' in _attrs_dict:
- Feature.warn_deprecated()
- self.require_features = []
- self.features = {}
- self.dist_files = []
- self.src_root = attrs and attrs.pop("src_root", None)
- self.patch_missing_pkg_info(attrs)
- # Make sure we have any eggs needed to interpret 'attrs'
- if attrs is not None:
- self.dependency_links = attrs.pop('dependency_links', [])
- assert_string_list(self,'dependency_links',self.dependency_links)
- if attrs and 'setup_requires' in attrs:
- self.fetch_build_eggs(attrs['setup_requires'])
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- vars(self).setdefault(ep.name, None)
- _Distribution.__init__(self,attrs)
- if isinstance(self.metadata.version, numbers.Number):
- # Some people apparently take "version number" too literally :)
- self.metadata.version = str(self.metadata.version)
-
- if self.metadata.version is not None:
- try:
- ver = packaging.version.Version(self.metadata.version)
- normalized_version = str(ver)
- if self.metadata.version != normalized_version:
- warnings.warn(
- "Normalizing '%s' to '%s'" % (
- self.metadata.version,
- normalized_version,
- )
- )
- self.metadata.version = normalized_version
- except (packaging.version.InvalidVersion, TypeError):
- warnings.warn(
- "The version specified (%r) is an invalid version, this "
- "may not work as expected with newer versions of "
- "setuptools, pip, and PyPI. Please see PEP 440 for more "
- "details." % self.metadata.version
- )
-
- def parse_command_line(self):
- """Process features after parsing command line options"""
- result = _Distribution.parse_command_line(self)
- if self.features:
- self._finalize_features()
- return result
-
- def _feature_attrname(self,name):
- """Convert feature name to corresponding option attribute name"""
- return 'with_'+name.replace('-','_')
-
- def fetch_build_eggs(self, requires):
- """Resolve pre-setup requirements"""
- resolved_dists = pkg_resources.working_set.resolve(
- pkg_resources.parse_requirements(requires),
- installer=self.fetch_build_egg,
- replace_conflicting=True,
- )
- for dist in resolved_dists:
- pkg_resources.working_set.add(dist, replace=True)
-
- def finalize_options(self):
- _Distribution.finalize_options(self)
- if self.features:
- self._set_global_opts_from_features()
-
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
- value = getattr(self,ep.name,None)
- if value is not None:
- ep.require(installer=self.fetch_build_egg)
- ep.load()(self, ep.name, value)
- if getattr(self, 'convert_2to3_doctests', None):
- # XXX may convert to set here when we can rely on set being builtin
- self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
- else:
- self.convert_2to3_doctests = []
-
- def get_egg_cache_dir(self):
- egg_cache_dir = os.path.join(os.curdir, '.eggs')
- if not os.path.exists(egg_cache_dir):
- os.mkdir(egg_cache_dir)
- windows_support.hide_file(egg_cache_dir)
- readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
- with open(readme_txt_filename, 'w') as f:
- f.write('This directory contains eggs that were downloaded '
- 'by setuptools to build, test, and run plug-ins.\n\n')
- f.write('This directory caches those eggs to prevent '
- 'repeated downloads.\n\n')
- f.write('However, it is safe to delete this directory.\n\n')
-
- return egg_cache_dir
-
- def fetch_build_egg(self, req):
- """Fetch an egg needed for building"""
-
- try:
- cmd = self._egg_fetcher
- cmd.package_index.to_scan = []
- except AttributeError:
- from setuptools.command.easy_install import easy_install
- dist = self.__class__({'script_args':['easy_install']})
- dist.parse_config_files()
- opts = dist.get_option_dict('easy_install')
- keep = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
- 'site_dirs', 'allow_hosts'
- )
- for key in list(opts):
- if key not in keep:
- del opts[key] # don't use any other settings
- if self.dependency_links:
- links = self.dependency_links[:]
- if 'find_links' in opts:
- links = opts['find_links'][1].split() + links
- opts['find_links'] = ('setup', links)
- install_dir = self.get_egg_cache_dir()
- cmd = easy_install(
- dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
- always_copy=False, build_directory=None, editable=False,
- upgrade=False, multi_version=True, no_report=True, user=False
- )
- cmd.ensure_finalized()
- self._egg_fetcher = cmd
- return cmd.easy_install(req)
-
- def _set_global_opts_from_features(self):
- """Add --with-X/--without-X options based on optional features"""
-
- go = []
- no = self.negative_opt.copy()
-
- for name,feature in self.features.items():
- self._set_feature(name,None)
- feature.validate(self)
-
- if feature.optional:
- descr = feature.description
- incdef = ' (default)'
- excdef=''
- if not feature.include_by_default():
- excdef, incdef = incdef, excdef
-
- go.append(('with-'+name, None, 'include '+descr+incdef))
- go.append(('without-'+name, None, 'exclude '+descr+excdef))
- no['without-'+name] = 'with-'+name
-
- self.global_options = self.feature_options = go + self.global_options
- self.negative_opt = self.feature_negopt = no
-
- def _finalize_features(self):
- """Add/remove features and resolve dependencies between them"""
-
- # First, flag all the enabled items (and thus their dependencies)
- for name,feature in self.features.items():
- enabled = self.feature_is_included(name)
- if enabled or (enabled is None and feature.include_by_default()):
- feature.include_in(self)
- self._set_feature(name,1)
-
- # Then disable the rest, so that off-by-default features don't
- # get flagged as errors when they're required by an enabled feature
- for name,feature in self.features.items():
- if not self.feature_is_included(name):
- feature.exclude_from(self)
- self._set_feature(name,0)
-
- def get_command_class(self, command):
- """Pluggable version of get_command_class()"""
- if command in self.cmdclass:
- return self.cmdclass[command]
-
- for ep in pkg_resources.iter_entry_points('distutils.commands',command):
- ep.require(installer=self.fetch_build_egg)
- self.cmdclass[command] = cmdclass = ep.load()
- return cmdclass
- else:
- return _Distribution.get_command_class(self, command)
-
- def print_commands(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
- if ep.name not in self.cmdclass:
- # don't require extras as the commands won't be invoked
- cmdclass = ep.resolve()
- self.cmdclass[ep.name] = cmdclass
- return _Distribution.print_commands(self)
-
- def get_command_list(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
- if ep.name not in self.cmdclass:
- # don't require extras as the commands won't be invoked
- cmdclass = ep.resolve()
- self.cmdclass[ep.name] = cmdclass
- return _Distribution.get_command_list(self)
-
- def _set_feature(self,name,status):
- """Set feature's inclusion status"""
- setattr(self,self._feature_attrname(name),status)
-
- def feature_is_included(self,name):
- """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
- return getattr(self,self._feature_attrname(name))
-
- def include_feature(self,name):
- """Request inclusion of feature named 'name'"""
-
- if self.feature_is_included(name)==0:
- descr = self.features[name].description
- raise DistutilsOptionError(
- descr + " is required, but was excluded or is not available"
- )
- self.features[name].include_in(self)
- self._set_feature(name,1)
-
- def include(self,**attrs):
- """Add items to distribution that are named in keyword arguments
-
- For example, 'dist.exclude(py_modules=["x"])' would add 'x' to
- the distribution's 'py_modules' attribute, if it was not already
- there.
-
- Currently, this method only supports inclusion for attributes that are
- lists or tuples. If you need to add support for adding to other
- attributes in this or a subclass, you can add an '_include_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
- will try to call 'dist._include_foo({"bar":"baz"})', which can then
- handle whatever special inclusion logic is needed.
- """
- for k,v in attrs.items():
- include = getattr(self, '_include_'+k, None)
- if include:
- include(v)
- else:
- self._include_misc(k,v)
-
- def exclude_package(self,package):
- """Remove packages, modules, and extensions in named package"""
-
- pfx = package+'.'
- if self.packages:
- self.packages = [
- p for p in self.packages
- if p != package and not p.startswith(pfx)
- ]
-
- if self.py_modules:
- self.py_modules = [
- p for p in self.py_modules
- if p != package and not p.startswith(pfx)
- ]
-
- if self.ext_modules:
- self.ext_modules = [
- p for p in self.ext_modules
- if p.name != package and not p.name.startswith(pfx)
- ]
-
- def has_contents_for(self,package):
- """Return true if 'exclude_package(package)' would do something"""
-
- pfx = package+'.'
-
- for p in self.iter_distribution_names():
- if p==package or p.startswith(pfx):
- return True
-
- def _exclude_misc(self,name,value):
- """Handle 'exclude()' for list/tuple attrs without a special handler"""
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list or tuple (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is not None and not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- elif old:
- setattr(self,name,[item for item in old if item not in value])
-
- def _include_misc(self,name,value):
- """Handle 'include()' for list/tuple attrs without a special handler"""
-
- if not isinstance(value,sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list (%r)" % (name, value)
- )
- try:
- old = getattr(self,name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
- if old is None:
- setattr(self,name,value)
- elif not isinstance(old,sequence):
- raise DistutilsSetupError(
- name+": this setting cannot be changed via include/exclude"
- )
- else:
- setattr(self,name,old+[item for item in value if item not in old])
-
- def exclude(self,**attrs):
- """Remove items from distribution that are named in keyword arguments
-
- For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
- the distribution's 'py_modules' attribute. Excluding packages uses
- the 'exclude_package()' method, so all of the package's contained
- packages, modules, and extensions are also excluded.
-
- Currently, this method only supports exclusion from attributes that are
- lists or tuples. If you need to add support for excluding from other
- attributes in this or a subclass, you can add an '_exclude_X' method,
- where 'X' is the name of the attribute. The method will be called with
- the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
- will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
- handle whatever special exclusion logic is needed.
- """
- for k,v in attrs.items():
- exclude = getattr(self, '_exclude_'+k, None)
- if exclude:
- exclude(v)
- else:
- self._exclude_misc(k,v)
-
- def _exclude_packages(self,packages):
- if not isinstance(packages,sequence):
- raise DistutilsSetupError(
- "packages: setting must be a list or tuple (%r)" % (packages,)
- )
- list(map(self.exclude_package, packages))
-
- def _parse_command_opts(self, parser, args):
- # Remove --with-X/--without-X options when processing command args
- self.global_options = self.__class__.global_options
- self.negative_opt = self.__class__.negative_opt
-
- # First, expand any aliases
- command = args[0]
- aliases = self.get_option_dict('aliases')
- while command in aliases:
- src,alias = aliases[command]
- del aliases[command] # ensure each alias can expand only once!
- import shlex
- args[:1] = shlex.split(alias,True)
- command = args[0]
-
- nargs = _Distribution._parse_command_opts(self, parser, args)
-
- # Handle commands that want to consume all remaining arguments
- cmd_class = self.get_command_class(command)
- if getattr(cmd_class,'command_consumes_arguments',None):
- self.get_option_dict(command)['args'] = ("command line", nargs)
- if nargs is not None:
- return []
-
- return nargs
-
- def get_cmdline_options(self):
- """Return a '{cmd: {opt:val}}' map of all command-line options
-
- Option names are all long, but do not include the leading '--', and
- contain dashes rather than underscores. If the option doesn't take
- an argument (e.g. '--quiet'), the 'val' is 'None'.
-
- Note that options provided by config files are intentionally excluded.
- """
-
- d = {}
-
- for cmd,opts in self.command_options.items():
-
- for opt,(src,val) in opts.items():
-
- if src != "command line":
- continue
-
- opt = opt.replace('_','-')
-
- if val==0:
- cmdobj = self.get_command_obj(cmd)
- neg_opt = self.negative_opt.copy()
- neg_opt.update(getattr(cmdobj,'negative_opt',{}))
- for neg,pos in neg_opt.items():
- if pos==opt:
- opt=neg
- val=None
- break
- else:
- raise AssertionError("Shouldn't be able to get here")
-
- elif val==1:
- val = None
-
- d.setdefault(cmd,{})[opt] = val
-
- return d
-
- def iter_distribution_names(self):
- """Yield all packages, modules, and extension names in distribution"""
-
- for pkg in self.packages or ():
- yield pkg
-
- for module in self.py_modules or ():
- yield module
-
- for ext in self.ext_modules or ():
- if isinstance(ext,tuple):
- name, buildinfo = ext
- else:
- name = ext.name
- if name.endswith('module'):
- name = name[:-6]
- yield name
-
- def handle_display_options(self, option_order):
- """If there were any non-global "display-only" options
- (--help-commands or the metadata display options) on the command
- line, display the requested info and return true; else return
- false.
- """
- import sys
-
- if six.PY2 or self.help_commands:
- return _Distribution.handle_display_options(self, option_order)
-
- # Stdout may be StringIO (e.g. in tests)
- import io
- if not isinstance(sys.stdout, io.TextIOWrapper):
- return _Distribution.handle_display_options(self, option_order)
-
- # Don't wrap stdout if utf-8 is already the encoding. Provides
- # workaround for #334.
- if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
- return _Distribution.handle_display_options(self, option_order)
-
- # Print metadata in UTF-8 no matter the platform
- encoding = sys.stdout.encoding
- errors = sys.stdout.errors
- newline = sys.platform != 'win32' and '\n' or None
- line_buffering = sys.stdout.line_buffering
-
- sys.stdout = io.TextIOWrapper(
- sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
- try:
- return _Distribution.handle_display_options(self, option_order)
- finally:
- sys.stdout = io.TextIOWrapper(
- sys.stdout.detach(), encoding, errors, newline, line_buffering)
-
-
-# Install it throughout the distutils
-for module in distutils.dist, distutils.core, distutils.cmd:
- module.Distribution = Distribution
-
-
-class Feature:
- """
- **deprecated** -- The `Feature` facility was never completely implemented
- or supported, `has reported issues
- <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in
- a future version.
-
- A subset of the distribution that can be excluded if unneeded/wanted
-
- Features are created using these keyword arguments:
-
- 'description' -- a short, human readable description of the feature, to
- be used in error messages, and option help messages.
-
- 'standard' -- if true, the feature is included by default if it is
- available on the current system. Otherwise, the feature is only
- included if requested via a command line '--with-X' option, or if
- another included feature requires it. The default setting is 'False'.
-
- 'available' -- if true, the feature is available for installation on the
- current system. The default setting is 'True'.
-
- 'optional' -- if true, the feature's inclusion can be controlled from the
- command line, using the '--with-X' or '--without-X' options. If
- false, the feature's inclusion status is determined automatically,
- based on 'availabile', 'standard', and whether any other feature
- requires it. The default setting is 'True'.
-
- 'require_features' -- a string or sequence of strings naming features
- that should also be included if this feature is included. Defaults to
- empty list. May also contain 'Require' objects that should be
- added/removed from the distribution.
-
- 'remove' -- a string or list of strings naming packages to be removed
- from the distribution if this feature is *not* included. If the
- feature *is* included, this argument is ignored. This argument exists
- to support removing features that "crosscut" a distribution, such as
- defining a 'tests' feature that removes all the 'tests' subpackages
- provided by other features. The default for this argument is an empty
- list. (Note: the named package(s) or modules must exist in the base
- distribution when the 'setup()' function is initially called.)
-
- other keywords -- any other keyword arguments are saved, and passed to
- the distribution's 'include()' and 'exclude()' methods when the
- feature is included or excluded, respectively. So, for example, you
- could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
- added or removed from the distribution as appropriate.
-
- A feature must include at least one 'requires', 'remove', or other
- keyword argument. Otherwise, it can't affect the distribution in any way.
- Note also that you can subclass 'Feature' to create your own specialized
- feature types that modify the distribution in other ways when included or
- excluded. See the docstrings for the various methods here for more detail.
- Aside from the methods, the only feature attributes that distributions look
- at are 'description' and 'optional'.
- """
-
- @staticmethod
- def warn_deprecated():
- warnings.warn(
- "Features are deprecated and will be removed in a future "
- "version. See https://github.com/pypa/setuptools/issues/65.",
- DeprecationWarning,
- stacklevel=3,
- )
-
- def __init__(self, description, standard=False, available=True,
- optional=True, require_features=(), remove=(), **extras):
- self.warn_deprecated()
-
- self.description = description
- self.standard = standard
- self.available = available
- self.optional = optional
- if isinstance(require_features,(str,Require)):
- require_features = require_features,
-
- self.require_features = [
- r for r in require_features if isinstance(r,str)
- ]
- er = [r for r in require_features if not isinstance(r,str)]
- if er: extras['require_features'] = er
-
- if isinstance(remove,str):
- remove = remove,
- self.remove = remove
- self.extras = extras
-
- if not remove and not require_features and not extras:
- raise DistutilsSetupError(
- "Feature %s: must define 'require_features', 'remove', or at least one"
- " of 'packages', 'py_modules', etc."
- )
-
- def include_by_default(self):
- """Should this feature be included by default?"""
- return self.available and self.standard
-
- def include_in(self,dist):
-
- """Ensure feature and its requirements are included in distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. Note that this method may be called more than once
- per feature, and so should be idempotent.
-
- """
-
- if not self.available:
- raise DistutilsPlatformError(
- self.description+" is required, "
- "but is not available on this platform"
- )
-
- dist.include(**self.extras)
-
- for f in self.require_features:
- dist.include_feature(f)
-
- def exclude_from(self,dist):
-
- """Ensure feature is excluded from distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. This method will be called at most once per
- feature, and only after all included features have been asked to
- include themselves.
- """
-
- dist.exclude(**self.extras)
-
- if self.remove:
- for item in self.remove:
- dist.exclude_package(item)
-
- def validate(self,dist):
-
- """Verify that feature makes sense in context of distribution
-
- This method is called by the distribution just before it parses its
- command line. It checks to ensure that the 'remove' attribute, if any,
- contains only valid package/module names that are present in the base
- distribution when 'setup()' is called. You may override it in a
- subclass to perform any other required validation of the feature
- against a target distribution.
- """
-
- for item in self.remove:
- if not dist.has_contents_for(item):
- raise DistutilsSetupError(
- "%s wants to be able to remove %s, but the distribution"
- " doesn't contain any packages or modules under %s"
- % (self.description, item, item)
- )
diff --git a/setuptools/extension.py b/setuptools/extension.py
deleted file mode 100644
index d10609b6..00000000
--- a/setuptools/extension.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import sys
-import re
-import functools
-import distutils.core
-import distutils.errors
-import distutils.extension
-
-from setuptools.extern.six.moves import map
-
-from .dist import _get_unpatched
-from . import msvc9_support
-
-_Extension = _get_unpatched(distutils.core.Extension)
-
-msvc9_support.patch_for_specialized_compiler()
-
-def _have_cython():
- """
- Return True if Cython can be imported.
- """
- cython_impl = 'Cython.Distutils.build_ext',
- try:
- # from (cython_impl) import build_ext
- __import__(cython_impl, fromlist=['build_ext']).build_ext
- return True
- except Exception:
- pass
- return False
-
-# for compatibility
-have_pyrex = _have_cython
-
-
-class Extension(_Extension):
- """Extension that uses '.c' files in place of '.pyx' files"""
-
- def _convert_pyx_sources_to_lang(self):
- """
- Replace sources with .pyx extensions to sources with the target
- language extension. This mechanism allows language authors to supply
- pre-converted sources but to prefer the .pyx sources.
- """
- if _have_cython():
- # the build has Cython, so allow it to compile the .pyx files
- return
- lang = self.language or ''
- target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
- sub = functools.partial(re.sub, '.pyx$', target_ext)
- self.sources = list(map(sub, self.sources))
-
-class Library(Extension):
- """Just like a regular Extension, but built as a library instead"""
-
-distutils.core.Extension = Extension
-distutils.extension.Extension = Extension
-if 'distutils.command.build_ext' in sys.modules:
- sys.modules['distutils.command.build_ext'].Extension = Extension
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
deleted file mode 100644
index 6859aa5b..00000000
--- a/setuptools/extern/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from pkg_resources.extern import VendorImporter
-
-
-names = 'six',
-VendorImporter(__name__, names, 'pkg_resources._vendor').install()
diff --git a/setuptools/gui-32.exe b/setuptools/gui-32.exe
deleted file mode 100644
index f8d35096..00000000
--- a/setuptools/gui-32.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/gui-64.exe b/setuptools/gui-64.exe
deleted file mode 100644
index 330c51a5..00000000
--- a/setuptools/gui-64.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/gui-arm-32.exe b/setuptools/gui-arm-32.exe
deleted file mode 100644
index 537aff37..00000000
--- a/setuptools/gui-arm-32.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/gui.exe b/setuptools/gui.exe
deleted file mode 100644
index f8d35096..00000000
--- a/setuptools/gui.exe
+++ /dev/null
Binary files differ
diff --git a/setuptools/launch.py b/setuptools/launch.py
deleted file mode 100644
index b05cbd2c..00000000
--- a/setuptools/launch.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
-Launch the Python script on the command line after
-setuptools is bootstrapped via import.
-"""
-
-# Note that setuptools gets imported implicitly by the
-# invocation of this script using python -m setuptools.launch
-
-import tokenize
-import sys
-
-
-def run():
- """
- Run the script in sys.argv[1] as if it had
- been invoked naturally.
- """
- __builtins__
- script_name = sys.argv[1]
- namespace = dict(
- __file__ = script_name,
- __name__ = '__main__',
- __doc__ = None,
- )
- sys.argv[:] = sys.argv[1:]
-
- open_ = getattr(tokenize, 'open', open)
- script = open_(script_name).read()
- norm_script = script.replace('\\r\\n', '\\n')
- code = compile(norm_script, script_name, 'exec')
- exec(code, namespace)
-
-
-if __name__ == '__main__':
- run()
diff --git a/setuptools/lib2to3_ex.py b/setuptools/lib2to3_ex.py
deleted file mode 100644
index feef591a..00000000
--- a/setuptools/lib2to3_ex.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
-Customized Mixin2to3 support:
-
- - adds support for converting doctests
-
-
-This module raises an ImportError on Python 2.
-"""
-
-from distutils.util import Mixin2to3 as _Mixin2to3
-from distutils import log
-from lib2to3.refactor import RefactoringTool, get_fixers_from_package
-import setuptools
-
-class DistutilsRefactoringTool(RefactoringTool):
- def log_error(self, msg, *args, **kw):
- log.error(msg, *args)
-
- def log_message(self, msg, *args):
- log.info(msg, *args)
-
- def log_debug(self, msg, *args):
- log.debug(msg, *args)
-
-class Mixin2to3(_Mixin2to3):
- def run_2to3(self, files, doctests = False):
- # See of the distribution option has been set, otherwise check the
- # setuptools default.
- if self.distribution.use_2to3 is not True:
- return
- if not files:
- return
- log.info("Fixing "+" ".join(files))
- self.__build_fixer_names()
- self.__exclude_fixers()
- if doctests:
- if setuptools.run_2to3_on_doctests:
- r = DistutilsRefactoringTool(self.fixer_names)
- r.refactor(files, write=True, doctests_only=True)
- else:
- _Mixin2to3.run_2to3(self, files)
-
- def __build_fixer_names(self):
- if self.fixer_names: return
- self.fixer_names = []
- for p in setuptools.lib2to3_fixer_packages:
- self.fixer_names.extend(get_fixers_from_package(p))
- if self.distribution.use_2to3_fixers is not None:
- for p in self.distribution.use_2to3_fixers:
- self.fixer_names.extend(get_fixers_from_package(p))
-
- def __exclude_fixers(self):
- excluded_fixers = getattr(self, 'exclude_fixers', [])
- if self.distribution.use_2to3_exclude_fixers is not None:
- excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
- for fixer_name in excluded_fixers:
- if fixer_name in self.fixer_names:
- self.fixer_names.remove(fixer_name)
diff --git a/setuptools/msvc9_support.py b/setuptools/msvc9_support.py
deleted file mode 100644
index 9d869580..00000000
--- a/setuptools/msvc9_support.py
+++ /dev/null
@@ -1,63 +0,0 @@
-try:
- import distutils.msvc9compiler
-except Exception:
- pass
-
-unpatched = dict()
-
-def patch_for_specialized_compiler():
- """
- Patch functions in distutils.msvc9compiler to use the standalone compiler
- build for Python (Windows only). Fall back to original behavior when the
- standalone compiler is not available.
- """
- if 'distutils' not in globals():
- # The module isn't available to be patched
- return
-
- if unpatched:
- # Already patched
- return
-
- unpatched.update(vars(distutils.msvc9compiler))
-
- distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
- distutils.msvc9compiler.query_vcvarsall = query_vcvarsall
-
-def find_vcvarsall(version):
- Reg = distutils.msvc9compiler.Reg
- VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
- key = VC_BASE % ('', version)
- try:
- # Per-user installs register the compiler path here
- productdir = Reg.get_value(key, "installdir")
- except KeyError:
- try:
- # All-user installs on a 64-bit system register here
- key = VC_BASE % ('Wow6432Node\\', version)
- productdir = Reg.get_value(key, "installdir")
- except KeyError:
- productdir = None
-
- if productdir:
- import os
- vcvarsall = os.path.join(productdir, "vcvarsall.bat")
- if os.path.isfile(vcvarsall):
- return vcvarsall
-
- return unpatched['find_vcvarsall'](version)
-
-def query_vcvarsall(version, *args, **kwargs):
- try:
- return unpatched['query_vcvarsall'](version, *args, **kwargs)
- except distutils.errors.DistutilsPlatformError as exc:
- if exc and "vcvarsall.bat" in exc.args[0]:
- message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
- if int(version) == 9:
- # This redirection link is maintained by Microsoft.
- # Contact vspython@microsoft.com if it needs updating.
- raise distutils.errors.DistutilsPlatformError(
- message + ' Get it from http://aka.ms/vcpython27'
- )
- raise distutils.errors.DistutilsPlatformError(message)
- raise
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
deleted file mode 100755
index c53343e4..00000000
--- a/setuptools/package_index.py
+++ /dev/null
@@ -1,1069 +0,0 @@
-"""PyPI and direct package downloading"""
-import sys
-import os
-import re
-import shutil
-import socket
-import base64
-import hashlib
-import itertools
-from functools import wraps
-
-try:
- from urllib.parse import splituser
-except ImportError:
- from urllib2 import splituser
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import urllib, http_client, configparser, map
-
-from pkg_resources import (
- CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
- require, Environment, find_distributions, safe_name, safe_version,
- to_filename, Requirement, DEVELOP_DIST,
-)
-from setuptools import ssl_support
-from distutils import log
-from distutils.errors import DistutilsError
-from fnmatch import translate
-from setuptools.py26compat import strip_fragment
-from setuptools.py27compat import get_all_headers
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
-PYPI_MD5 = re.compile(
- '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
- 'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
- 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
- 'interpret_distro_name',
-]
-
-_SOCKET_TIMEOUT = 15
-
-def parse_bdist_wininst(name):
- """Return (base,pyversion) or (None,None) for possible .exe name"""
-
- lower = name.lower()
- base, py_ver, plat = None, None, None
-
- if lower.endswith('.exe'):
- if lower.endswith('.win32.exe'):
- base = name[:-10]
- plat = 'win32'
- elif lower.startswith('.win32-py',-16):
- py_ver = name[-7:-4]
- base = name[:-16]
- plat = 'win32'
- elif lower.endswith('.win-amd64.exe'):
- base = name[:-14]
- plat = 'win-amd64'
- elif lower.startswith('.win-amd64-py',-20):
- py_ver = name[-7:-4]
- base = name[:-20]
- plat = 'win-amd64'
- return base,py_ver,plat
-
-
-def egg_info_for_url(url):
- parts = urllib.parse.urlparse(url)
- scheme, server, path, parameters, query, fragment = parts
- base = urllib.parse.unquote(path.split('/')[-1])
- if server=='sourceforge.net' and base=='download': # XXX Yuck
- base = urllib.parse.unquote(path.split('/')[-2])
- if '#' in base: base, fragment = base.split('#',1)
- return base,fragment
-
-def distros_for_url(url, metadata=None):
- """Yield egg or source distribution objects that might be found at a URL"""
- base, fragment = egg_info_for_url(url)
- for dist in distros_for_location(url, base, metadata): yield dist
- if fragment:
- match = EGG_FRAGMENT.match(fragment)
- if match:
- for dist in interpret_distro_name(
- url, match.group(1), metadata, precedence = CHECKOUT_DIST
- ):
- yield dist
-
-def distros_for_location(location, basename, metadata=None):
- """Yield egg or source distribution objects based on basename"""
- if basename.endswith('.egg.zip'):
- basename = basename[:-4] # strip the .zip
- if basename.endswith('.egg') and '-' in basename:
- # only one, unambiguous interpretation
- return [Distribution.from_location(location, basename, metadata)]
- if basename.endswith('.exe'):
- win_base, py_ver, platform = parse_bdist_wininst(basename)
- if win_base is not None:
- return interpret_distro_name(
- location, win_base, metadata, py_ver, BINARY_DIST, platform
- )
- # Try source distro extensions (.zip, .tgz, etc.)
- #
- for ext in EXTENSIONS:
- if basename.endswith(ext):
- basename = basename[:-len(ext)]
- return interpret_distro_name(location, basename, metadata)
- return [] # no extension matched
-
-def distros_for_filename(filename, metadata=None):
- """Yield possible egg or source distribution objects based on a filename"""
- return distros_for_location(
- normalize_path(filename), os.path.basename(filename), metadata
- )
-
-
-def interpret_distro_name(
- location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
- platform=None
- ):
- """Generate alternative interpretations of a source distro name
-
- Note: if `location` is a filesystem filename, you should call
- ``pkg_resources.normalize_path()`` on it before passing it to this
- routine!
- """
- # Generate alternative interpretations of a source distro name
- # Because some packages are ambiguous as to name/versions split
- # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
- # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
- # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
- # the spurious interpretations should be ignored, because in the event
- # there's also an "adns" package, the spurious "python-1.1.0" version will
- # compare lower than any numeric version number, and is therefore unlikely
- # to match a request for it. It's still a potential problem, though, and
- # in the long run PyPI and the distutils should go for "safe" names and
- # versions in distribution archive names (sdist and bdist).
-
- parts = basename.split('-')
- if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]):
- # it is a bdist_dumb, not an sdist -- bail out
- return
-
- for p in range(1,len(parts)+1):
- yield Distribution(
- location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
- py_version=py_version, precedence = precedence,
- platform = platform
- )
-
-# From Python 2.7 docs
-def unique_everseen(iterable, key=None):
- "List unique elements, preserving order. Remember all elements ever seen."
- # unique_everseen('AAAABBBCCDAABBB') --> A B C D
- # unique_everseen('ABBCcAD', str.lower) --> A B C D
- seen = set()
- seen_add = seen.add
- if key is None:
- for element in six.moves.filterfalse(seen.__contains__, iterable):
- seen_add(element)
- yield element
- else:
- for element in iterable:
- k = key(element)
- if k not in seen:
- seen_add(k)
- yield element
-
-def unique_values(func):
- """
- Wrap a function returning an iterable such that the resulting iterable
- only ever yields unique items.
- """
- @wraps(func)
- def wrapper(*args, **kwargs):
- return unique_everseen(func(*args, **kwargs))
- return wrapper
-
-REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
-
-@unique_values
-def find_external_links(url, page):
- """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
-
- for match in REL.finditer(page):
- tag, rel = match.groups()
- rels = set(map(str.strip, rel.lower().split(',')))
- if 'homepage' in rels or 'download' in rels:
- for match in HREF.finditer(tag):
- yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
- for tag in ("<th>Home Page", "<th>Download URL"):
- pos = page.find(tag)
- if pos!=-1:
- match = HREF.search(page,pos)
- if match:
- yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
-user_agent = "Python-urllib/%s setuptools/%s" % (
- sys.version[:3], require('setuptools')[0].version
-)
-
class ContentChecker(object):
    """
    Null-object content checker: defines the validation interface and
    accepts any content unconditionally.
    """

    def feed(self, block):
        """Consume a block of downloaded data; no-op for the null checker."""
        pass

    def is_valid(self):
        """Return True if the content seen so far passes validation."""
        return True

    def report(self, reporter, template):
        """Report checker details via ``reporter``; no-op for the null checker."""
        pass
-
class HashChecker(ContentChecker):
    """Validate downloaded content against an ``algorithm=hexdigest``
    URL fragment (e.g. ``#md5=...``)."""

    # Recognized "algorithm=hexdigest" pairs inside a URL fragment.
    pattern = re.compile(
        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
        r'(?P<expected>[a-f0-9]+)'
    )

    def __init__(self, hash_name, expected):
        self.hash_name = hash_name
        self.hash = hashlib.new(hash_name)
        self.expected = expected

    @classmethod
    def from_url(cls, url):
        "Construct a (possibly null) ContentChecker from a URL"
        fragment = urllib.parse.urlparse(url)[-1]
        if not fragment:
            return ContentChecker()
        match = cls.pattern.search(fragment)
        return cls(**match.groupdict()) if match else ContentChecker()

    def feed(self, block):
        """Update the running hash with a block of downloaded bytes."""
        self.hash.update(block)

    def is_valid(self):
        """True when the accumulated digest equals the expected one."""
        return self.expected == self.hash.hexdigest()

    def report(self, reporter, template):
        """Report which hash algorithm is being validated."""
        return reporter(template % self.hash_name)
-
-
class PackageIndex(Environment):
    """A distribution index that scans web pages for download URLs"""

    def __init__(
            self, index_url="https://pypi.python.org/simple", hosts=('*',),
            ca_bundle=None, verify_ssl=True, *args, **kw
            ):
        Environment.__init__(self,*args,**kw)
        # Normalize the index URL to always end with exactly one slash.
        self.index_url = index_url + "/"[:not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        # Host allow-list compiled from the glob patterns in `hosts`.
        self.allows = re.compile('|'.join(map(translate,hosts))).match
        self.to_scan = []
        if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()):
            self.opener = ssl_support.opener_for(ca_bundle)
        else: self.opener = urllib.request.urlopen

    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        if not URL_SCHEME(url):
            # No scheme: treat as a local filename or directory.
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)

        if dists or not retrieve or url in self.fetched_urls:
            list(map(self.add, dists))
            return  # don't need the actual page

        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return

        self.info("Reading %s", url)
        self.fetched_urls[url] = True  # prevent multiple fetch attempts
        f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
        if f is None: return
        self.fetched_urls[f.url] = True
        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()  # not html, we can't process it
            return

        base = f.url  # handle redirects
        page = f.read()
        if not isinstance(page, str):  # We are in Python 3 and got bytes. We want str.
            if isinstance(f, urllib.error.HTTPError):
                # Errors have no charset, assume latin1:
                charset = 'latin-1'
            else:
                charset = f.headers.get_param('charset') or 'latin-1'
            page = page.decode(charset, "ignore")
        f.close()
        # Recursively scan every link found on the page.
        for match in HREF.finditer(page):
            link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
            self.process_url(link)
        if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
            page = self.process_index(url, page)

    def process_filename(self, fn, nested=False):
        # process filenames or directories
        if not os.path.exists(fn):
            self.warn("Not found: %s", fn)
            return

        # Recurse one level into directories.
        if os.path.isdir(fn) and not nested:
            path = os.path.realpath(fn)
            for item in os.listdir(path):
                self.process_filename(os.path.join(path,item), True)

        dists = distros_for_filename(fn)
        if dists:
            self.debug("Found: %s", fn)
            list(map(self.add, dists))

    def url_ok(self, url, fatal=False):
        # True when the URL is a file: URL or its host matches the allow-list;
        # otherwise warn (or raise, when `fatal`).
        s = URL_SCHEME(url)
        if (s and s.group(1).lower()=='file') or self.allows(urllib.parse.urlparse(url)[1]):
            return True
        msg = ("\nNote: Bypassing %s (disallowed host; see "
               "http://bit.ly/1dg9ijs for details).\n")
        if fatal:
            raise DistutilsError(msg % url)
        else:
            self.warn(msg, url)

    def scan_egg_links(self, search_path):
        # Scan every directory on search_path for *.egg-link files.
        dirs = filter(os.path.isdir, search_path)
        egg_links = (
            (path, entry)
            for path in dirs
            for entry in os.listdir(path)
            if entry.endswith('.egg-link')
        )
        list(itertools.starmap(self.scan_egg_link, egg_links))

    def scan_egg_link(self, path, entry):
        # Register the development distribution an .egg-link file points at.
        with open(os.path.join(path, entry)) as raw_lines:
            # filter non-empty lines
            lines = list(filter(None, map(str.strip, raw_lines)))

        if len(lines) != 2:
            # format is not recognized; punt
            return

        egg_path, setup_path = lines

        for dist in find_distributions(os.path.join(path, egg_path)):
            dist.location = os.path.join(path, *lines)
            dist.precedence = SOURCE_DIST
            self.add(dist)

    def process_index(self,url,page):
        """Process the contents of a PyPI page"""
        def scan(link):
            # Process a URL to see if it's for a package page
            if link.startswith(self.index_url):
                parts = list(map(
                    urllib.parse.unquote, link[len(self.index_url):].split('/')
                ))
                if len(parts)==2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None

        # process an index page into the package-page index
        for match in HREF.finditer(page):
            try:
                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
            except ValueError:
                pass

        pkg, ver = scan(url)  # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    if ver:
                        new_url+='#egg=%s-%s' % (pkg,ver)
                    else:
                        self.need_version_info(url)
                self.scan_url(new_url)

            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
            )
        else:
            return ""  # no sense double-scanning non-package pages

    def need_version_info(self, url):
        # A .py link without #egg info forces a full index scan (with warning).
        self.scan_all(
            "Page at %s links to .py file(s) without version info; an index "
            "scan is required.", url
        )

    def scan_all(self, msg=None, *args):
        # Scan the top-level index page; warn (once) when falling back to it.
        if self.index_url not in self.fetched_urls:
            if msg: self.warn(msg,*args)
            self.info(
                "Scanning index of all packages (this may take a while)"
            )
        self.scan_url(self.index_url)

    def find_packages(self, requirement):
        # Scan every index page that might list `requirement`.
        self.scan_url(self.index_url + requirement.unsafe_name+'/')

        if not self.package_pages.get(requirement.key):
            # Fall back to safe version of the name
            self.scan_url(self.index_url + requirement.project_name+'/')

        if not self.package_pages.get(requirement.key):
            # We couldn't find the target package, so search the index page too
            self.not_found_in_index(requirement)

        for url in list(self.package_pages.get(requirement.key,())):
            # scan each page that might be related to the desired package
            self.scan_url(url)

    def obtain(self, requirement, installer=None):
        # Prefer an indexed match; otherwise defer to Environment.obtain.
        self.prescan()
        self.find_packages(requirement)
        for dist in self[requirement.key]:
            if dist in requirement:
                return dist
            self.debug("%s does not match %s", requirement, dist)
        return super(PackageIndex, self).obtain(requirement,installer)

    def check_hash(self, checker, filename, tfp):
        """
        checker is a ContentChecker
        """
        checker.report(self.debug,
            "Validating %%s checksum for %s" % filename)
        if not checker.is_valid():
            # Discard the corrupt download before raising.
            tfp.close()
            os.unlink(filename)
            raise DistutilsError(
                "%s validation failed for %s; "
                "possible download problem?" % (
                    checker.hash.name, os.path.basename(filename))
            )

    def add_find_links(self, urls):
        """Add `urls` to the list that will be prescanned for searches"""
        for url in urls:
            if (
                self.to_scan is None  # if we have already "gone online"
                or not URL_SCHEME(url)  # or it's a local file/directory
                or url.startswith('file:')
                or list(distros_for_url(url))  # or a direct package link
            ):
                # then go ahead and process it now
                self.scan_url(url)
            else:
                # otherwise, defer retrieval till later
                self.to_scan.append(url)

    def prescan(self):
        """Scan urls scheduled for prescanning (e.g. --find-links)"""
        if self.to_scan:
            list(map(self.scan_url, self.to_scan))
        self.to_scan = None  # from now on, go ahead and process immediately

    def not_found_in_index(self, requirement):
        # Nothing indexed for this name: warn appropriately, then scan
        # the whole index as a last resort.
        if self[requirement.key]:  # we've seen at least one distro
            meth, msg = self.info, "Couldn't retrieve index page for %r"
        else:  # no distros seen for this name, might be misspelled
            meth, msg = (self.warn,
                "Couldn't find index page for %r (maybe misspelled?)")
        meth(msg, requirement.unsafe_name)
        self.scan_all()

    def download(self, spec, tmpdir):
        """Locate and/or download `spec` to `tmpdir`, returning a local path

        `spec` may be a ``Requirement`` object, or a string containing a URL,
        an existing local filename, or a project/version requirement spec
        (i.e. the string form of a ``Requirement`` object). If it is the URL
        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
        automatically created alongside the downloaded file.

        If `spec` is a ``Requirement`` object or a string containing a
        project/version requirement spec, this method returns the location of
        a matching distribution (possibly after downloading it to `tmpdir`).
        If `spec` is a locally existing file or directory name, it is simply
        returned unchanged. If `spec` is a URL, it is downloaded to a subpath
        of `tmpdir`, and the local filename is returned. Various errors may be
        raised if a problem occurs during downloading.
        """
        if not isinstance(spec,Requirement):
            scheme = URL_SCHEME(spec)
            if scheme:
                # It's a url, download it to tmpdir
                found = self._download_url(scheme.group(1), spec, tmpdir)
                base, fragment = egg_info_for_url(spec)
                if base.endswith('.py'):
                    found = self.gen_setup(found,fragment,tmpdir)
                return found
            elif os.path.exists(spec):
                # Existing file or directory, just return it
                return spec
            else:
                try:
                    spec = Requirement.parse(spec)
                except ValueError:
                    raise DistutilsError(
                        "Not a URL, existing file, or requirement spec: %r" %
                        (spec,)
                    )
        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)

    def fetch_distribution(
            self, requirement, tmpdir, force_scan=False, source=False,
            develop_ok=False, local_index=None
            ):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages. If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename. If no matching distribution is found,
        ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered. Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}
        dist = None

        def find(req, env=None):
            if env is None:
                env = self
            # Find a matching distribution; may be called more than once

            for dist in env[req.key]:

                if dist.precedence==DEVELOP_DIST and not develop_ok:
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s",dist)
                        skipped[dist] = 1
                    continue

                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
                    return dist

        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(requirement)

        if local_index is not None:
            dist = dist or find(requirement, local_index)

        if dist is None:
            if self.to_scan is not None:
                self.prescan()
            dist = find(requirement)

        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)

        if dist is None:
            self.warn(
                "No local packages or download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        else:
            self.info("Best match: %s", dist)
            return dist.clone(location=self.download(dist.location, tmpdir))

    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
        """Obtain a file suitable for fulfilling `requirement`

        DEPRECATED; use the ``fetch_distribution()`` method now instead. For
        backward compatibility, this routine is identical but returns the
        ``location`` of the downloaded distribution instead of a distribution
        object.
        """
        dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
        if dist is not None:
            return dist.location
        return None

    def gen_setup(self, filename, fragment, tmpdir):
        # Generate a trivial setup.py beside a bare .py download whose
        # ``#egg=name-version`` fragment identifies it unambiguously.
        match = EGG_FRAGMENT.match(fragment)
        dists = match and [
            d for d in
            interpret_distro_name(filename, match.group(1), None) if d.version
        ] or []

        if len(dists)==1:  # unambiguous ``#egg`` fragment
            basename = os.path.basename(filename)

            # Make sure the file has been downloaded to the temp dir.
            if os.path.dirname(filename) != tmpdir:
                dst = os.path.join(tmpdir, basename)
                from setuptools.command.easy_install import samefile
                if not samefile(filename, dst):
                    shutil.copy2(filename, dst)
                    filename=dst

            with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
                file.write(
                    "from setuptools import setup\n"
                    "setup(name=%r, version=%r, py_modules=[%r])\n"
                    % (
                        dists[0].project_name, dists[0].version,
                        os.path.splitext(basename)[0]
                    )
                )
            return filename

        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment,dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )

    dl_blocksize = 8192

    def _download_to(self, url, filename):
        # Stream `url` into `filename`, feeding the hash checker and the
        # reporthook along the way; returns the response headers.
        self.info("Downloading %s", url)
        # Download the file
        fp, info = None, None
        try:
            checker = HashChecker.from_url(url)
            fp = self.open_url(strip_fragment(url))
            if isinstance(fp, urllib.error.HTTPError):
                raise DistutilsError(
                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
                )
            headers = fp.info()
            blocknum = 0
            bs = self.dl_blocksize
            size = -1
            if "content-length" in headers:
                # Some servers return multiple Content-Length headers :(
                sizes = get_all_headers(headers, 'Content-Length')
                size = max(map(int, sizes))
                self.reporthook(url, filename, blocknum, bs, size)
            with open(filename,'wb') as tfp:
                while True:
                    block = fp.read(bs)
                    if block:
                        checker.feed(block)
                        tfp.write(block)
                        blocknum += 1
                        self.reporthook(url, filename, blocknum, bs, size)
                    else:
                        break
                self.check_hash(checker, filename, tfp)
            return headers
        finally:
            if fp: fp.close()

    def reporthook(self, url, filename, blocknum, blksize, size):
        pass  # no-op

    def open_url(self, url, warning=None):
        # Open `url`, translating network-level failures into DistutilsError
        # (or just a warning when `warning` is supplied).
        if url.startswith('file:'):
            return local_open(url)
        try:
            return open_with_auth(url, self.opener)
        except (ValueError, http_client.InvalidURL) as v:
            msg = ' '.join([str(arg) for arg in v.args])
            if warning:
                self.warn(warning, msg)
            else:
                raise DistutilsError('%s %s' % (url, msg))
        except urllib.error.HTTPError as v:
            # HTTP errors are returned, not raised, so callers can inspect them.
            return v
        except urllib.error.URLError as v:
            if warning:
                self.warn(warning, v.reason)
            else:
                raise DistutilsError("Download error for %s: %s"
                    % (url, v.reason))
        except http_client.BadStatusLine as v:
            if warning:
                self.warn(warning, v.line)
            else:
                raise DistutilsError(
                    '%s returned a bad status line. The server might be '
                    'down, %s' %
                    (url, v.line)
                )
        except http_client.HTTPException as v:
            if warning:
                self.warn(warning, v)
            else:
                raise DistutilsError("Download error for %s: %s"
                    % (url, v))

    def _download_url(self, scheme, url, tmpdir):
        # Determine download filename
        #
        name, fragment = egg_info_for_url(url)
        if name:
            while '..' in name:
                name = name.replace('..','.').replace('\\','_')
        else:
            name = "__downloaded__"  # default if URL has no path contents

        if name.endswith('.egg.zip'):
            name = name[:-4]  # strip the extra .zip before download

        filename = os.path.join(tmpdir,name)

        # Download the file
        #
        if scheme=='svn' or scheme.startswith('svn+'):
            return self._download_svn(url, filename)
        elif scheme=='git' or scheme.startswith('git+'):
            return self._download_git(url, filename)
        elif scheme.startswith('hg+'):
            return self._download_hg(url, filename)
        elif scheme=='file':
            return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
        else:
            self.url_ok(url, True)  # raises error if not allowed
            return self._attempt_download(url, filename)

    def scan_url(self, url):
        # Process a URL, always retrieving its page.
        self.process_url(url, True)

    def _attempt_download(self, url, filename):
        # Download, then dispatch to HTML handling if we got a page instead
        # of a distribution file.
        headers = self._download_to(url, filename)
        if 'html' in headers.get('content-type','').lower():
            return self._download_html(url, headers, filename)
        else:
            return filename

    def _download_html(self, url, headers, filename):
        # The "download" turned out to be HTML; a Subversion index page
        # triggers an svn checkout, anything else is an error.
        file = open(filename)
        for line in file:
            if line.strip():
                # Check for a subversion index page
                if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
                    # it's a subversion index page:
                    file.close()
                    os.unlink(filename)
                    return self._download_svn(url, filename)
                break  # not an index page
        file.close()
        os.unlink(filename)
        raise DistutilsError("Unexpected HTML page found at "+url)

    def _download_svn(self, url, filename):
        # Check out `url` via the svn command line, extracting embedded
        # user:pass credentials into --username/--password options.
        url = url.split('#',1)[0]  # remove any fragment for svn's sake
        creds = ''
        if url.lower().startswith('svn:') and '@' in url:
            scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
            if not netloc and path.startswith('//') and '/' in path[2:]:
                netloc, path = path[2:].split('/',1)
                auth, host = splituser(netloc)
                if auth:
                    if ':' in auth:
                        user, pw = auth.split(':',1)
                        creds = " --username=%s --password=%s" % (user, pw)
                    else:
                        creds = " --username="+auth
                    netloc = host
                    parts = scheme, netloc, url, p, q, f
                    url = urllib.parse.urlunparse(parts)
        self.info("Doing subversion checkout from %s to %s", url, filename)
        os.system("svn checkout%s -q %s %s" % (creds, url, filename))
        return filename

    @staticmethod
    def _vcs_split_rev_from_url(url, pop_prefix=False):
        # Split a "vcs+proto://host/path@rev#frag" URL into (clean_url, rev).
        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)

        # Drop the "vcs+" prefix from the scheme.
        scheme = scheme.split('+', 1)[-1]

        # Strip any #fragment that leaked into the path.
        path = path.split('#',1)[0]

        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)

        # Rebuild the URL without the revision or fragment.
        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))

        return url, rev

    def _download_git(self, url, filename):
        # Clone the repository, then check out the requested revision (if any).
        filename = filename.split('#',1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)

        self.info("Doing git clone from %s to %s", url, filename)
        os.system("git clone --quiet %s %s" % (url, filename))

        if rev is not None:
            self.info("Checking out %s", rev)
            os.system("(cd %s && git checkout --quiet %s)" % (
                filename,
                rev,
            ))

        return filename

    def _download_hg(self, url, filename):
        # Clone the repository, then update to the requested revision (if any).
        filename = filename.split('#',1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)

        self.info("Doing hg clone from %s to %s", url, filename)
        os.system("hg clone --quiet %s %s" % (url, filename))

        if rev is not None:
            self.info("Updating to %s", rev)
            os.system("(cd %s && hg up -C -r %s >&-)" % (
                filename,
                rev,
            ))

        return filename

    # Logging helpers; forward to the distutils log.
    def debug(self, msg, *args):
        log.debug(msg, *args)

    def info(self, msg, *args):
        log.info(msg, *args)

    def warn(self, msg, *args):
        log.warn(msg, *args)
-
# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
# Bound to the compiled pattern's .sub for direct use by htmldecode().
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
-
def uchr(c):
    """Return the unicode character for codepoint *c*; non-int values
    (unresolved entity names) are passed through unchanged."""
    if not isinstance(c, int):
        return c
    # six.unichr handles wide codepoints on Python 2; chr suffices below 256.
    if c>255: return six.unichr(c)
    return chr(c)
-
def decode_entity(match):
    # Translate one matched HTML entity (``&#10;``, ``&#x0a;`` or ``&amp;``)
    # into its character; unknown named entities are left as the matched text.
    what = match.group(1)
    if what.startswith('#x'):
        what = int(what[2:], 16)
    elif what.startswith('#'):
        what = int(what[1:])
    else:
        what = six.moves.html_entities.name2codepoint.get(what, match.group(0))
    return uchr(what)
-
def htmldecode(text):
    """Decode HTML entities in the given text."""
    # entity_sub applies decode_entity to every entity reference in `text`.
    return entity_sub(decode_entity, text)
-
def socket_timeout(timeout=15):
    """Decorator factory: run the wrapped callable with the global default
    socket timeout set to *timeout* seconds, restoring the prior value
    afterward (even on error)."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            previous = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)
            try:
                return func(*args, **kwargs)
            finally:
                socket.setdefaulttimeout(previous)
        return wrapper
    return decorator
-
-def _encode_auth(auth):
- """
- A function compatible with Python 2.3-3.3 that will encode
- auth from a URL suitable for an HTTP header.
- >>> str(_encode_auth('username%3Apassword'))
- 'dXNlcm5hbWU6cGFzc3dvcmQ='
-
- Long auth strings should not cause a newline to be inserted.
- >>> long_auth = 'username:' + 'password'*10
- >>> chr(10) in str(_encode_auth(long_auth))
- False
- """
- auth_s = urllib.parse.unquote(auth)
- # convert to bytes
- auth_bytes = auth_s.encode()
- # use the legacy interface for Python 2.3 support
- encoded_bytes = base64.encodestring(auth_bytes)
- # convert back to a string
- encoded = encoded_bytes.decode()
- # strip the trailing carriage return
- return encoded.replace('\n','')
-
class Credential(object):
    """
    A username/password pair. Use like a namedtuple.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __iter__(self):
        # Unpackable as (username, password), namedtuple-style.
        return iter((self.username, self.password))

    def __str__(self):
        return '%(username)s:%(password)s' % vars(self)
-
class PyPIConfig(configparser.RawConfigParser):
    """Read ~/.pypirc and expose the credentials it defines per repository."""

    def __init__(self):
        """
        Load from ~/.pypirc
        """
        defaults = dict.fromkeys(['username', 'password', 'repository'], '')
        configparser.RawConfigParser.__init__(self, defaults)

        rc = os.path.join(os.path.expanduser('~'), '.pypirc')
        if os.path.exists(rc):
            self.read(rc)

    @property
    def creds_by_repository(self):
        # Map repository URL -> Credential for every section that names one.
        sections_with_repositories = [
            section for section in self.sections()
            if self.get(section, 'repository').strip()
        ]

        return dict(map(self._get_repo_cred, sections_with_repositories))

    def _get_repo_cred(self, section):
        # Build the (repository URL, Credential) pair for one config section.
        repo = self.get(section, 'repository').strip()
        return repo, Credential(
            self.get(section, 'username').strip(),
            self.get(section, 'password').strip(),
        )

    def find_credential(self, url):
        """
        If the URL indicated appears to be a repository defined in this
        config, return the credential for that repository.
        """
        for repository, cred in self.creds_by_repository.items():
            if url.startswith(repository):
                return cred
-
-
def open_with_auth(url, opener=urllib.request.urlopen):
    """Open a urllib2 request, handling HTTP authentication"""

    scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)

    # Double scheme does not raise on Mac OS X as revealed by a
    # failing test. We would expect "nonnumeric port". Refs #20.
    if netloc.endswith(':'):
        raise http_client.InvalidURL("nonnumeric port: ''")

    if scheme in ('http', 'https'):
        auth, host = splituser(netloc)
    else:
        auth = None

    if not auth:
        # No user:pass embedded in the URL; fall back to ~/.pypirc.
        cred = PyPIConfig().find_credential(url)
        if cred:
            auth = str(cred)
            info = cred.username, url
            log.info('Authenticating as %s for %s (from .pypirc)' % info)

    if auth:
        auth = "Basic " + _encode_auth(auth)
        # Rebuild the URL without the credentials before requesting it.
        parts = scheme, host, path, params, query, frag
        new_url = urllib.parse.urlunparse(parts)
        request = urllib.request.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib.request.Request(url)

    request.add_header('User-Agent', user_agent)
    fp = opener(request)

    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
        if s2==scheme and h2==host:
            parts = s2, netloc, path2, param2, query2, frag2
            fp.url = urllib.parse.urlunparse(parts)

    return fp
-
# adding a timeout to avoid freezing package_index
# (rebinds open_with_auth so every caller gets the timeout-wrapped version)
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
-
-
def fix_sf_url(url):
    """Historical SourceForge URL rewriter; now an identity function kept
    only for backward compatibility."""
    return url
-
def local_open(url):
    """Read a local path, with special support for directories"""
    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
    filename = urllib.request.url2pathname(path)
    if os.path.isfile(filename):
        return urllib.request.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        files = []
        for f in os.listdir(filename):
            filepath = os.path.join(filename, f)
            if f == 'index.html':
                # An explicit index page wins; serve it verbatim.
                with open(filepath, 'r') as fp:
                    body = fp.read()
                break
            elif os.path.isdir(filepath):
                f += '/'
            files.append('<a href="{name}">{name}</a>'.format(name=f))
        else:
            # No index.html found: synthesize a listing page.
            tmpl = ("<html><head><title>{url}</title>"
                "</head><body>{files}</body></html>")
            body = tmpl.format(url=url, files='\n'.join(files))
        status, message = 200, "OK"
    else:
        status, message, body = 404, "Path not found", "Not found"

    # Both success and failure are delivered as an HTTPError-shaped response.
    headers = {'content-type': 'text/html'}
    body_stream = six.StringIO(body)
    return urllib.error.HTTPError(url, status, message, headers, body_stream)
diff --git a/setuptools/py26compat.py b/setuptools/py26compat.py
deleted file mode 100644
index e52bd85b..00000000
--- a/setuptools/py26compat.py
+++ /dev/null
@@ -1,22 +0,0 @@
"""
Compatibility Support for Python 2.6 and earlier
"""

import sys

try:
    from urllib.parse import splittag
except ImportError:
    from urllib import splittag

def strip_fragment(url):
    """
    In `Python 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and
    later was patched to disregard the fragment when making URL requests.
    Do the same for Python 2.6 and earlier.
    """
    url, fragment = splittag(url)
    return url

# Python 2.7+ already ignores fragments, so make this a no-op there.
if sys.version_info >= (2,7):
    strip_fragment = lambda x: x
diff --git a/setuptools/py27compat.py b/setuptools/py27compat.py
deleted file mode 100644
index 9d2886db..00000000
--- a/setuptools/py27compat.py
+++ /dev/null
@@ -1,15 +0,0 @@
"""
Compatibility Support for Python 2.7 and earlier
"""

import sys

def get_all_headers(message, key):
    """
    Given an HTTPMessage, return all headers matching a given key.
    """
    return message.get_all(key)

if sys.version_info < (3,):
    # Python 2's httplib message object spells this method differently.
    def get_all_headers(message, key):
        return message.getheaders(key)
diff --git a/setuptools/py31compat.py b/setuptools/py31compat.py
deleted file mode 100644
index 8fe6dd9d..00000000
--- a/setuptools/py31compat.py
+++ /dev/null
@@ -1,52 +0,0 @@
import sys
import unittest

__all__ = ['get_config_vars', 'get_path']

try:
    # Python 2.7 or >=3.2
    from sysconfig import get_config_vars, get_path
except ImportError:
    from distutils.sysconfig import get_config_vars, get_python_lib
    def get_path(name):
        # Minimal sysconfig.get_path shim for older Pythons.
        if name not in ('platlib', 'purelib'):
            raise ValueError("Name must be purelib or platlib")
        return get_python_lib(name=='platlib')

try:
    # Python >=3.2
    from tempfile import TemporaryDirectory
except ImportError:
    import shutil
    import tempfile
    class TemporaryDirectory(object):
        """
        Very simple temporary directory context manager.
        Will try to delete afterward, but will also ignore OS and similar
        errors on deletion.
        """
        def __init__(self):
            self.name = None # Handle mkdtemp raising an exception
            self.name = tempfile.mkdtemp()

        def __enter__(self):
            return self.name

        def __exit__(self, exctype, excvalue, exctrace):
            try:
                shutil.rmtree(self.name, True)
            except OSError: #removal errors are not the only possible
                pass
            self.name = None


unittest_main = unittest.main

_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2)
if _PY31:
    # on Python 3.1, translate testRunner==None to TextTestRunner
    # for compatibility with Python 2.6, 2.7, and 3.2+
    def unittest_main(*args, **kwargs):
        if 'testRunner' in kwargs and kwargs['testRunner'] is None:
            kwargs['testRunner'] = unittest.TextTestRunner
        return unittest.main(*args, **kwargs)
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
deleted file mode 100755
index 23e296b1..00000000
--- a/setuptools/sandbox.py
+++ /dev/null
@@ -1,496 +0,0 @@
-import os
-import sys
-import tempfile
-import operator
-import functools
-import itertools
-import re
-import contextlib
-import pickle
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import builtins, map
-
-import pkg_resources
-
-if sys.platform.startswith('java'):
- import org.python.modules.posix.PosixModule as _os
-else:
- _os = sys.modules[os.name]
-try:
- _file = file
-except NameError:
- _file = None
-_open = open
-from distutils.errors import DistutilsError
-from pkg_resources import working_set
-
-__all__ = [
- "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
-]
-
-def _execfile(filename, globals, locals=None):
- """
- Python 3 implementation of execfile.
- """
- mode = 'rb'
- with open(filename, mode) as stream:
- script = stream.read()
- # compile() function in Python 2.6 and 3.1 requires LF line endings.
- if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2):
- script = script.replace(b'\r\n', b'\n')
- script = script.replace(b'\r', b'\n')
- if locals is None:
- locals = globals
- code = compile(script, filename, 'exec')
- exec(code, globals, locals)
-
-
-@contextlib.contextmanager
-def save_argv(repl=None):
- saved = sys.argv[:]
- if repl is not None:
- sys.argv[:] = repl
- try:
- yield saved
- finally:
- sys.argv[:] = saved
-
-
-@contextlib.contextmanager
-def save_path():
- saved = sys.path[:]
- try:
- yield saved
- finally:
- sys.path[:] = saved
-
-
-@contextlib.contextmanager
-def override_temp(replacement):
- """
- Monkey-patch tempfile.tempdir with replacement, ensuring it exists
- """
- if not os.path.isdir(replacement):
- os.makedirs(replacement)
-
- saved = tempfile.tempdir
-
- tempfile.tempdir = replacement
-
- try:
- yield
- finally:
- tempfile.tempdir = saved
-
-
-@contextlib.contextmanager
-def pushd(target):
- saved = os.getcwd()
- os.chdir(target)
- try:
- yield saved
- finally:
- os.chdir(saved)
-
-
-class UnpickleableException(Exception):
- """
- An exception representing another Exception that could not be pickled.
- """
- @staticmethod
- def dump(type, exc):
- """
- Always return a dumped (pickled) type and exc. If exc can't be pickled,
- wrap it in UnpickleableException first.
- """
- try:
- return pickle.dumps(type), pickle.dumps(exc)
- except Exception:
- # get UnpickleableException inside the sandbox
- from setuptools.sandbox import UnpickleableException as cls
- return cls.dump(cls, cls(repr(exc)))
-
-
-class ExceptionSaver:
- """
- A Context Manager that will save an exception, serialized, and restore it
- later.
- """
- def __enter__(self):
- return self
-
- def __exit__(self, type, exc, tb):
- if not exc:
- return
-
- # dump the exception
- self._saved = UnpickleableException.dump(type, exc)
- self._tb = tb
-
- # suppress the exception
- return True
-
- def resume(self):
- "restore and re-raise any exception"
-
- if '_saved' not in vars(self):
- return
-
- type, exc = map(pickle.loads, self._saved)
- six.reraise(type, exc, self._tb)
-
-
-@contextlib.contextmanager
-def save_modules():
- """
- Context in which imported modules are saved.
-
- Translates exceptions internal to the context into the equivalent exception
- outside the context.
- """
- saved = sys.modules.copy()
- with ExceptionSaver() as saved_exc:
- yield saved
-
- sys.modules.update(saved)
- # remove any modules imported since
- del_modules = (
- mod_name for mod_name in sys.modules
- if mod_name not in saved
- # exclude any encodings modules. See #285
- and not mod_name.startswith('encodings.')
- )
- _clear_modules(del_modules)
-
- saved_exc.resume()
-
-
-def _clear_modules(module_names):
- for mod_name in list(module_names):
- del sys.modules[mod_name]
-
-
-@contextlib.contextmanager
-def save_pkg_resources_state():
- saved = pkg_resources.__getstate__()
- try:
- yield saved
- finally:
- pkg_resources.__setstate__(saved)
-
-
-@contextlib.contextmanager
-def setup_context(setup_dir):
- temp_dir = os.path.join(setup_dir, 'temp')
- with save_pkg_resources_state():
- with save_modules():
- hide_setuptools()
- with save_path():
- with save_argv():
- with override_temp(temp_dir):
- with pushd(setup_dir):
- # ensure setuptools commands are available
- __import__('setuptools')
- yield
-
-
-def _needs_hiding(mod_name):
- """
- >>> _needs_hiding('setuptools')
- True
- >>> _needs_hiding('pkg_resources')
- True
- >>> _needs_hiding('setuptools_plugin')
- False
- >>> _needs_hiding('setuptools.__init__')
- True
- >>> _needs_hiding('distutils')
- True
- >>> _needs_hiding('os')
- False
- >>> _needs_hiding('Cython')
- True
- """
- pattern = re.compile('(setuptools|pkg_resources|distutils|Cython)(\.|$)')
- return bool(pattern.match(mod_name))
-
-
-def hide_setuptools():
- """
- Remove references to setuptools' modules from sys.modules to allow the
- invocation to import the most appropriate setuptools. This technique is
- necessary to avoid issues such as #315 where setuptools upgrading itself
- would fail to find a function declared in the metadata.
- """
- modules = filter(_needs_hiding, sys.modules)
- _clear_modules(modules)
-
-
-def run_setup(setup_script, args):
- """Run a distutils setup script, sandboxed in its directory"""
- setup_dir = os.path.abspath(os.path.dirname(setup_script))
- with setup_context(setup_dir):
- try:
- sys.argv[:] = [setup_script]+list(args)
- sys.path.insert(0, setup_dir)
- # reset to include setup dir, w/clean callback list
- working_set.__init__()
- working_set.callbacks.append(lambda dist:dist.activate())
- def runner():
- ns = dict(__file__=setup_script, __name__='__main__')
- _execfile(setup_script, ns)
- DirectorySandbox(setup_dir).run(runner)
- except SystemExit as v:
- if v.args and v.args[0]:
- raise
- # Normal exit, just return
-
-
-class AbstractSandbox:
- """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
-
- _active = False
-
- def __init__(self):
- self._attrs = [
- name for name in dir(_os)
- if not name.startswith('_') and hasattr(self,name)
- ]
-
- def _copy(self, source):
- for name in self._attrs:
- setattr(os, name, getattr(source,name))
-
- def run(self, func):
- """Run 'func' under os sandboxing"""
- try:
- self._copy(self)
- if _file:
- builtins.file = self._file
- builtins.open = self._open
- self._active = True
- return func()
- finally:
- self._active = False
- if _file:
- builtins.file = _file
- builtins.open = _open
- self._copy(_os)
-
- def _mk_dual_path_wrapper(name):
- original = getattr(_os,name)
- def wrap(self,src,dst,*args,**kw):
- if self._active:
- src,dst = self._remap_pair(name,src,dst,*args,**kw)
- return original(src,dst,*args,**kw)
- return wrap
-
- for name in ["rename", "link", "symlink"]:
- if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
-
- def _mk_single_path_wrapper(name, original=None):
- original = original or getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return original(path,*args,**kw)
- return wrap
-
- if _file:
- _file = _mk_single_path_wrapper('file', _file)
- _open = _mk_single_path_wrapper('open', _open)
- for name in [
- "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
- "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
- "startfile", "mkfifo", "mknod", "pathconf", "access"
- ]:
- if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
-
- def _mk_single_with_return(name):
- original = getattr(_os,name)
- def wrap(self,path,*args,**kw):
- if self._active:
- path = self._remap_input(name,path,*args,**kw)
- return self._remap_output(name, original(path,*args,**kw))
- return original(path,*args,**kw)
- return wrap
-
- for name in ['readlink', 'tempnam']:
- if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
-
- def _mk_query(name):
- original = getattr(_os,name)
- def wrap(self,*args,**kw):
- retval = original(*args,**kw)
- if self._active:
- return self._remap_output(name, retval)
- return retval
- return wrap
-
- for name in ['getcwd', 'tmpnam']:
- if hasattr(_os,name): locals()[name] = _mk_query(name)
-
- def _validate_path(self,path):
- """Called to remap or validate any path, whether input or output"""
- return path
-
- def _remap_input(self,operation,path,*args,**kw):
- """Called for path inputs"""
- return self._validate_path(path)
-
- def _remap_output(self,operation,path):
- """Called for path outputs"""
- return self._validate_path(path)
-
- def _remap_pair(self,operation,src,dst,*args,**kw):
- """Called for path pairs like rename, link, and symlink operations"""
- return (
- self._remap_input(operation+'-from',src,*args,**kw),
- self._remap_input(operation+'-to',dst,*args,**kw)
- )
-
-
-if hasattr(os, 'devnull'):
- _EXCEPTIONS = [os.devnull,]
-else:
- _EXCEPTIONS = []
-
-try:
- from win32com.client.gencache import GetGeneratePath
- _EXCEPTIONS.append(GetGeneratePath())
- del GetGeneratePath
-except ImportError:
- # it appears pywin32 is not installed, so no need to exclude.
- pass
-
-class DirectorySandbox(AbstractSandbox):
- """Restrict operations to a single subdirectory - pseudo-chroot"""
-
- write_ops = dict.fromkeys([
- "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
- "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
- ])
-
- _exception_patterns = [
- # Allow lib2to3 to attempt to save a pickled grammar object (#121)
- '.*lib2to3.*\.pickle$',
- ]
- "exempt writing to paths that match the pattern"
-
- def __init__(self, sandbox, exceptions=_EXCEPTIONS):
- self._sandbox = os.path.normcase(os.path.realpath(sandbox))
- self._prefix = os.path.join(self._sandbox,'')
- self._exceptions = [
- os.path.normcase(os.path.realpath(path))
- for path in exceptions
- ]
- AbstractSandbox.__init__(self)
-
- def _violation(self, operation, *args, **kw):
- from setuptools.sandbox import SandboxViolation
- raise SandboxViolation(operation, args, kw)
-
- if _file:
- def _file(self, path, mode='r', *args, **kw):
- if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
- self._violation("file", path, mode, *args, **kw)
- return _file(path,mode,*args,**kw)
-
- def _open(self, path, mode='r', *args, **kw):
- if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
- self._violation("open", path, mode, *args, **kw)
- return _open(path,mode,*args,**kw)
-
- def tmpnam(self):
- self._violation("tmpnam")
-
- def _ok(self, path):
- active = self._active
- try:
- self._active = False
- realpath = os.path.normcase(os.path.realpath(path))
- return (
- self._exempted(realpath)
- or realpath == self._sandbox
- or realpath.startswith(self._prefix)
- )
- finally:
- self._active = active
-
- def _exempted(self, filepath):
- start_matches = (
- filepath.startswith(exception)
- for exception in self._exceptions
- )
- pattern_matches = (
- re.match(pattern, filepath)
- for pattern in self._exception_patterns
- )
- candidates = itertools.chain(start_matches, pattern_matches)
- return any(candidates)
-
- def _remap_input(self, operation, path, *args, **kw):
- """Called for path inputs"""
- if operation in self.write_ops and not self._ok(path):
- self._violation(operation, os.path.realpath(path), *args, **kw)
- return path
-
- def _remap_pair(self, operation, src, dst, *args, **kw):
- """Called for path pairs like rename, link, and symlink operations"""
- if not self._ok(src) or not self._ok(dst):
- self._violation(operation, src, dst, *args, **kw)
- return (src,dst)
-
- def open(self, file, flags, mode=0o777, *args, **kw):
- """Called for low-level os.open()"""
- if flags & WRITE_FLAGS and not self._ok(file):
- self._violation("os.open", file, flags, mode, *args, **kw)
- return _os.open(file,flags,mode, *args, **kw)
-
-WRITE_FLAGS = functools.reduce(
- operator.or_, [getattr(_os, a, 0) for a in
- "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
-)
-
-class SandboxViolation(DistutilsError):
- """A setup script attempted to modify the filesystem outside the sandbox"""
-
- def __str__(self):
- return """SandboxViolation: %s%r %s
-
-The package setup script has attempted to modify files on your system
-that are not within the EasyInstall build area, and has been aborted.
-
-This package cannot be safely installed by EasyInstall, and may not
-support alternate installation locations even if you run its setup
-script by hand. Please inform the package's author and the EasyInstall
-maintainers to find out if a fix or workaround is available.""" % self.args
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
diff --git a/setuptools/script (dev).tmpl b/setuptools/script (dev).tmpl
deleted file mode 100644
index d58b1bb5..00000000
--- a/setuptools/script (dev).tmpl
+++ /dev/null
@@ -1,5 +0,0 @@
-# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
-__requires__ = %(spec)r
-__import__('pkg_resources').require(%(spec)r)
-__file__ = %(dev_path)r
-exec(compile(open(__file__).read(), __file__, 'exec'))
diff --git a/setuptools/script.tmpl b/setuptools/script.tmpl
deleted file mode 100644
index ff5efbca..00000000
--- a/setuptools/script.tmpl
+++ /dev/null
@@ -1,3 +0,0 @@
-# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
-__requires__ = %(spec)r
-__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
diff --git a/setuptools/site-patch.py b/setuptools/site-patch.py
deleted file mode 100644
index c2168019..00000000
--- a/setuptools/site-patch.py
+++ /dev/null
@@ -1,76 +0,0 @@
-def __boot():
- import sys
- import os
- PYTHONPATH = os.environ.get('PYTHONPATH')
- if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
- PYTHONPATH = []
- else:
- PYTHONPATH = PYTHONPATH.split(os.pathsep)
-
- pic = getattr(sys,'path_importer_cache',{})
- stdpath = sys.path[len(PYTHONPATH):]
- mydir = os.path.dirname(__file__)
- #print "searching",stdpath,sys.path
-
- for item in stdpath:
- if item==mydir or not item:
- continue # skip if current dir. on Windows, or my own directory
- importer = pic.get(item)
- if importer is not None:
- loader = importer.find_module('site')
- if loader is not None:
- # This should actually reload the current module
- loader.load_module('site')
- break
- else:
- try:
- import imp # Avoid import loop in Python >= 3.3
- stream, path, descr = imp.find_module('site',[item])
- except ImportError:
- continue
- if stream is None:
- continue
- try:
- # This should actually reload the current module
- imp.load_module('site',stream,path,descr)
- finally:
- stream.close()
- break
- else:
- raise ImportError("Couldn't find the real 'site' module")
-
- #print "loaded", __file__
-
- known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
-
- oldpos = getattr(sys,'__egginsert',0) # save old insertion position
- sys.__egginsert = 0 # and reset the current one
-
- for item in PYTHONPATH:
- addsitedir(item)
-
- sys.__egginsert += oldpos # restore effective old position
-
- d, nd = makepath(stdpath[0])
- insert_at = None
- new_path = []
-
- for item in sys.path:
- p, np = makepath(item)
-
- if np==nd and insert_at is None:
- # We've hit the first 'system' path entry, so added entries go here
- insert_at = len(new_path)
-
- if np in known_paths or insert_at is None:
- new_path.append(item)
- else:
- # new path after the insert point, back-insert it
- new_path.insert(insert_at, item)
- insert_at += 1
-
- sys.path[:] = new_path
-
-if __name__=='site':
- __boot()
- del __boot
diff --git a/setuptools/ssl_support.py b/setuptools/ssl_support.py
deleted file mode 100644
index 657197cf..00000000
--- a/setuptools/ssl_support.py
+++ /dev/null
@@ -1,243 +0,0 @@
-import os
-import socket
-import atexit
-import re
-
-from setuptools.extern.six.moves import urllib, http_client, map
-
-import pkg_resources
-from pkg_resources import ResolutionError, ExtractionError
-
-try:
- import ssl
-except ImportError:
- ssl = None
-
-__all__ = [
- 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
- 'opener_for'
-]
-
-cert_paths = """
-/etc/pki/tls/certs/ca-bundle.crt
-/etc/ssl/certs/ca-certificates.crt
-/usr/share/ssl/certs/ca-bundle.crt
-/usr/local/share/certs/ca-root.crt
-/etc/ssl/cert.pem
-/System/Library/OpenSSL/certs/cert.pem
-/usr/local/share/certs/ca-root-nss.crt
-""".strip().split()
-
-
-try:
- HTTPSHandler = urllib.request.HTTPSHandler
- HTTPSConnection = http_client.HTTPSConnection
-except AttributeError:
- HTTPSHandler = HTTPSConnection = object
-
-is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
-
-
-try:
- from ssl import CertificateError, match_hostname
-except ImportError:
- try:
- from backports.ssl_match_hostname import CertificateError
- from backports.ssl_match_hostname import match_hostname
- except ImportError:
- CertificateError = None
- match_hostname = None
-
-if not CertificateError:
- class CertificateError(ValueError):
- pass
-
-if not match_hostname:
- def _dnsname_match(dn, hostname, max_wildcards=1):
- """Matching according to RFC 6125, section 6.4.3
-
- http://tools.ietf.org/html/rfc6125#section-6.4.3
- """
- pats = []
- if not dn:
- return False
-
- # Ported from python3-syntax:
- # leftmost, *remainder = dn.split(r'.')
- parts = dn.split(r'.')
- leftmost = parts[0]
- remainder = parts[1:]
-
- wildcards = leftmost.count('*')
- if wildcards > max_wildcards:
- # Issue #17980: avoid denials of service by refusing more
- # than one wildcard per fragment. A survey of established
- # policy among SSL implementations showed it to be a
- # reasonable choice.
- raise CertificateError(
- "too many wildcards in certificate DNS name: " + repr(dn))
-
- # speed up common case w/o wildcards
- if not wildcards:
- return dn.lower() == hostname.lower()
-
- # RFC 6125, section 6.4.3, subitem 1.
- # The client SHOULD NOT attempt to match a presented identifier in which
- # the wildcard character comprises a label other than the left-most label.
- if leftmost == '*':
- # When '*' is a fragment by itself, it matches a non-empty dotless
- # fragment.
- pats.append('[^.]+')
- elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
- # RFC 6125, section 6.4.3, subitem 3.
- # The client SHOULD NOT attempt to match a presented identifier
- # where the wildcard character is embedded within an A-label or
- # U-label of an internationalized domain name.
- pats.append(re.escape(leftmost))
- else:
- # Otherwise, '*' matches any dotless string, e.g. www*
- pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
- # add the remaining fragments, ignore any wildcards
- for frag in remainder:
- pats.append(re.escape(frag))
-
- pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
- return pat.match(hostname)
-
- def match_hostname(cert, hostname):
- """Verify that *cert* (in decoded format as returned by
- SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
- rules are followed, but IP addresses are not accepted for *hostname*.
-
- CertificateError is raised on failure. On success, the function
- returns nothing.
- """
- if not cert:
- raise ValueError("empty or no certificate")
- dnsnames = []
- san = cert.get('subjectAltName', ())
- for key, value in san:
- if key == 'DNS':
- if _dnsname_match(value, hostname):
- return
- dnsnames.append(value)
- if not dnsnames:
- # The subject is only checked when there is no dNSName entry
- # in subjectAltName
- for sub in cert.get('subject', ()):
- for key, value in sub:
- # XXX according to RFC 2818, the most specific Common Name
- # must be used.
- if key == 'commonName':
- if _dnsname_match(value, hostname):
- return
- dnsnames.append(value)
- if len(dnsnames) > 1:
- raise CertificateError("hostname %r "
- "doesn't match either of %s"
- % (hostname, ', '.join(map(repr, dnsnames))))
- elif len(dnsnames) == 1:
- raise CertificateError("hostname %r "
- "doesn't match %r"
- % (hostname, dnsnames[0]))
- else:
- raise CertificateError("no appropriate commonName or "
- "subjectAltName fields were found")
-
-
-class VerifyingHTTPSHandler(HTTPSHandler):
- """Simple verifying handler: no auth, subclasses, timeouts, etc."""
-
- def __init__(self, ca_bundle):
- self.ca_bundle = ca_bundle
- HTTPSHandler.__init__(self)
-
- def https_open(self, req):
- return self.do_open(
- lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
- )
-
-
-class VerifyingHTTPSConn(HTTPSConnection):
- """Simple verifying connection: no auth, subclasses, timeouts, etc."""
- def __init__(self, host, ca_bundle, **kw):
- HTTPSConnection.__init__(self, host, **kw)
- self.ca_bundle = ca_bundle
-
- def connect(self):
- sock = socket.create_connection(
- (self.host, self.port), getattr(self, 'source_address', None)
- )
-
- # Handle the socket if a (proxy) tunnel is present
- if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
- self.sock = sock
- self._tunnel()
- # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
- # change self.host to mean the proxy server host when tunneling is
- # being used. Adapt, since we are interested in the destination
- # host for the match_hostname() comparison.
- actual_host = self._tunnel_host
- else:
- actual_host = self.host
-
- self.sock = ssl.wrap_socket(
- sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
- )
- try:
- match_hostname(self.sock.getpeercert(), actual_host)
- except CertificateError:
- self.sock.shutdown(socket.SHUT_RDWR)
- self.sock.close()
- raise
-
-def opener_for(ca_bundle=None):
- """Get a urlopen() replacement that uses ca_bundle for verification"""
- return urllib.request.build_opener(
- VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
- ).open
-
-
-_wincerts = None
-
-def get_win_certfile():
- global _wincerts
- if _wincerts is not None:
- return _wincerts.name
-
- try:
- from wincertstore import CertFile
- except ImportError:
- return None
-
- class MyCertFile(CertFile):
- def __init__(self, stores=(), certs=()):
- CertFile.__init__(self)
- for store in stores:
- self.addstore(store)
- self.addcerts(certs)
- atexit.register(self.close)
-
- def close(self):
- try:
- super(MyCertFile, self).close()
- except OSError:
- pass
-
- _wincerts = MyCertFile(stores=['CA', 'ROOT'])
- return _wincerts.name
-
-
-def find_ca_bundle():
- """Return an existing CA bundle path, or None"""
- if os.name=='nt':
- return get_win_certfile()
- else:
- for cert_path in cert_paths:
- if os.path.isfile(cert_path):
- return cert_path
- try:
- return pkg_resources.resource_filename('certifi', 'cacert.pem')
- except (ImportError, ResolutionError, ExtractionError):
- return None
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
deleted file mode 100644
index 32447356..00000000
--- a/setuptools/tests/__init__.py
+++ /dev/null
@@ -1,328 +0,0 @@
-"""Tests for the 'setuptools' package"""
-import sys
-import os
-import distutils.core
-import distutils.cmd
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-from distutils.core import Extension
-from distutils.version import LooseVersion
-
-from setuptools.extern import six
-import pytest
-
-import setuptools.dist
-import setuptools.depends as dep
-from setuptools import Feature
-from setuptools.depends import Require
-
-c_type = os.environ.get("LC_CTYPE", os.environ.get("LC_ALL"))
-is_ascii = c_type in ("C", "POSIX")
-fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
-
-
-def makeSetup(**args):
- """Return distribution from 'setup(**args)', without executing commands"""
-
- distutils.core._setup_stop_after = "commandline"
-
- # Don't let system command line leak into tests!
- args.setdefault('script_args',['install'])
-
- try:
- return setuptools.setup(**args)
- finally:
- distutils.core._setup_stop_after = None
-
-
-needs_bytecode = pytest.mark.skipif(
- not hasattr(dep, 'get_module_constant'),
- reason="bytecode support not available",
-)
-
-class TestDepends:
-
- def testExtractConst(self):
- if not hasattr(dep, 'extract_constant'):
- # skip on non-bytecode platforms
- return
-
- def f1():
- global x, y, z
- x = "test"
- y = z
-
- fc = six.get_function_code(f1)
-
- # unrecognized name
- assert dep.extract_constant(fc,'q', -1) is None
-
- # constant assigned
- dep.extract_constant(fc,'x', -1) == "test"
-
- # expression assigned
- dep.extract_constant(fc,'y', -1) == -1
-
- # recognized name, not assigned
- dep.extract_constant(fc,'z', -1) is None
-
- def testFindModule(self):
- with pytest.raises(ImportError):
- dep.find_module('no-such.-thing')
- with pytest.raises(ImportError):
- dep.find_module('setuptools.non-existent')
- f,p,i = dep.find_module('setuptools.tests')
- f.close()
-
- @needs_bytecode
- def testModuleExtract(self):
- from email import __version__
- assert dep.get_module_constant('email','__version__') == __version__
- assert dep.get_module_constant('sys','version') == sys.version
- assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__
-
- @needs_bytecode
- def testRequire(self):
- req = Require('Email','1.0.3','email')
-
- assert req.name == 'Email'
- assert req.module == 'email'
- assert req.requested_version == '1.0.3'
- assert req.attribute == '__version__'
- assert req.full_name() == 'Email-1.0.3'
-
- from email import __version__
- assert req.get_version() == __version__
- assert req.version_ok('1.0.9')
- assert not req.version_ok('0.9.1')
- assert not req.version_ok('unknown')
-
- assert req.is_present()
- assert req.is_current()
-
- req = Require('Email 3000','03000','email',format=LooseVersion)
- assert req.is_present()
- assert not req.is_current()
- assert not req.version_ok('unknown')
-
- req = Require('Do-what-I-mean','1.0','d-w-i-m')
- assert not req.is_present()
- assert not req.is_current()
-
- req = Require('Tests', None, 'tests', homepage="http://example.com")
- assert req.format is None
- assert req.attribute is None
- assert req.requested_version is None
- assert req.full_name() == 'Tests'
- assert req.homepage == 'http://example.com'
-
- paths = [os.path.dirname(p) for p in __path__]
- assert req.is_present(paths)
- assert req.is_current(paths)
-
-
-class TestDistro:
-
- def setup_method(self, method):
- self.e1 = Extension('bar.ext',['bar.c'])
- self.e2 = Extension('c.y', ['y.c'])
-
- self.dist = makeSetup(
- packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
- py_modules=['b.d','x'],
- ext_modules = (self.e1, self.e2),
- package_dir = {},
- )
-
- def testDistroType(self):
- assert isinstance(self.dist,setuptools.dist.Distribution)
-
- def testExcludePackage(self):
- self.dist.exclude_package('a')
- assert self.dist.packages == ['b','c']
-
- self.dist.exclude_package('b')
- assert self.dist.packages == ['c']
- assert self.dist.py_modules == ['x']
- assert self.dist.ext_modules == [self.e1, self.e2]
-
- self.dist.exclude_package('c')
- assert self.dist.packages == []
- assert self.dist.py_modules == ['x']
- assert self.dist.ext_modules == [self.e1]
-
- # test removals from unspecified options
- makeSetup().exclude_package('x')
-
- def testIncludeExclude(self):
- # remove an extension
- self.dist.exclude(ext_modules=[self.e1])
- assert self.dist.ext_modules == [self.e2]
-
- # add it back in
- self.dist.include(ext_modules=[self.e1])
- assert self.dist.ext_modules == [self.e2, self.e1]
-
- # should not add duplicate
- self.dist.include(ext_modules=[self.e1])
- assert self.dist.ext_modules == [self.e2, self.e1]
-
- def testExcludePackages(self):
- self.dist.exclude(packages=['c','b','a'])
- assert self.dist.packages == []
- assert self.dist.py_modules == ['x']
- assert self.dist.ext_modules == [self.e1]
-
- def testEmpty(self):
- dist = makeSetup()
- dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
- dist = makeSetup()
- dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
-
- def testContents(self):
- assert self.dist.has_contents_for('a')
- self.dist.exclude_package('a')
- assert not self.dist.has_contents_for('a')
-
- assert self.dist.has_contents_for('b')
- self.dist.exclude_package('b')
- assert not self.dist.has_contents_for('b')
-
- assert self.dist.has_contents_for('c')
- self.dist.exclude_package('c')
- assert not self.dist.has_contents_for('c')
-
- def testInvalidIncludeExclude(self):
- with pytest.raises(DistutilsSetupError):
- self.dist.include(nonexistent_option='x')
- with pytest.raises(DistutilsSetupError):
- self.dist.exclude(nonexistent_option='x')
- with pytest.raises(DistutilsSetupError):
- self.dist.include(packages={'x':'y'})
- with pytest.raises(DistutilsSetupError):
- self.dist.exclude(packages={'x':'y'})
- with pytest.raises(DistutilsSetupError):
- self.dist.include(ext_modules={'x':'y'})
- with pytest.raises(DistutilsSetupError):
- self.dist.exclude(ext_modules={'x':'y'})
-
- with pytest.raises(DistutilsSetupError):
- self.dist.include(package_dir=['q'])
- with pytest.raises(DistutilsSetupError):
- self.dist.exclude(package_dir=['q'])
-
-
-class TestFeatures:
-
- def setup_method(self, method):
- self.req = Require('Distutils','1.0.3','distutils')
- self.dist = makeSetup(
- features={
- 'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
- 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
- py_modules=['bar_et'], remove=['bar.ext'],
- ),
- 'baz': Feature(
- "baz", optional=False, packages=['pkg.baz'],
- scripts = ['scripts/baz_it'],
- libraries=[('libfoo','foo/foofoo.c')]
- ),
- 'dwim': Feature("DWIM", available=False, remove='bazish'),
- },
- script_args=['--without-bar', 'install'],
- packages = ['pkg.bar', 'pkg.foo'],
- py_modules = ['bar_et', 'bazish'],
- ext_modules = [Extension('bar.ext',['bar.c'])]
- )
-
- def testDefaults(self):
- assert not Feature(
- "test",standard=True,remove='x',available=False
- ).include_by_default()
- assert Feature("test",standard=True,remove='x').include_by_default()
- # Feature must have either kwargs, removes, or require_features
- with pytest.raises(DistutilsSetupError):
- Feature("test")
-
- def testAvailability(self):
- with pytest.raises(DistutilsPlatformError):
- self.dist.features['dwim'].include_in(self.dist)
-
- def testFeatureOptions(self):
- dist = self.dist
- assert (
- ('with-dwim',None,'include DWIM') in dist.feature_options
- )
- assert (
- ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
- )
- assert (
- ('with-bar',None,'include bar (default)') in dist.feature_options
- )
- assert (
- ('without-bar',None,'exclude bar') in dist.feature_options
- )
- assert dist.feature_negopt['without-foo'] == 'with-foo'
- assert dist.feature_negopt['without-bar'] == 'with-bar'
- assert dist.feature_negopt['without-dwim'] == 'with-dwim'
- assert (not 'without-baz' in dist.feature_negopt)
-
- def testUseFeatures(self):
- dist = self.dist
- assert dist.with_foo == 1
- assert dist.with_bar == 0
- assert dist.with_baz == 1
- assert (not 'bar_et' in dist.py_modules)
- assert (not 'pkg.bar' in dist.packages)
- assert ('pkg.baz' in dist.packages)
- assert ('scripts/baz_it' in dist.scripts)
- assert (('libfoo','foo/foofoo.c') in dist.libraries)
- assert dist.ext_modules == []
- assert dist.require_features == [self.req]
-
- # If we ask for bar, it should fail because we explicitly disabled
- # it on the command line
- with pytest.raises(DistutilsOptionError):
- dist.include_feature('bar')
-
- def testFeatureWithInvalidRemove(self):
- with pytest.raises(SystemExit):
- makeSetup(features={'x':Feature('x', remove='y')})
-
-class TestCommandTests:
-
- def testTestIsCommand(self):
- test_cmd = makeSetup().get_command_obj('test')
- assert (isinstance(test_cmd, distutils.cmd.Command))
-
- def testLongOptSuiteWNoDefault(self):
- ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
- ts1 = ts1.get_command_obj('test')
- ts1.ensure_finalized()
- assert ts1.test_suite == 'foo.tests.suite'
-
- def testDefaultSuite(self):
- ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
- ts2.ensure_finalized()
- assert ts2.test_suite == 'bar.tests.suite'
-
- def testDefaultWModuleOnCmdLine(self):
- ts3 = makeSetup(
- test_suite='bar.tests',
- script_args=['test','-m','foo.tests']
- ).get_command_obj('test')
- ts3.ensure_finalized()
- assert ts3.test_module == 'foo.tests'
- assert ts3.test_suite == 'foo.tests.test_suite'
-
- def testConflictingOptions(self):
- ts4 = makeSetup(
- script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
- ).get_command_obj('test')
- with pytest.raises(DistutilsOptionError):
- ts4.ensure_finalized()
-
- def testNoSuite(self):
- ts5 = makeSetup().get_command_obj('test')
- ts5.ensure_finalized()
- assert ts5.test_suite == None
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
deleted file mode 100644
index ae28c7c3..00000000
--- a/setuptools/tests/contexts.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import tempfile
-import os
-import shutil
-import sys
-import contextlib
-import site
-
-from setuptools.extern import six
-import pkg_resources
-
-
-@contextlib.contextmanager
-def tempdir(cd=lambda dir:None, **kwargs):
- temp_dir = tempfile.mkdtemp(**kwargs)
- orig_dir = os.getcwd()
- try:
- cd(temp_dir)
- yield temp_dir
- finally:
- cd(orig_dir)
- shutil.rmtree(temp_dir)
-
-
-@contextlib.contextmanager
-def environment(**replacements):
- """
- In a context, patch the environment with replacements. Pass None values
- to clear the values.
- """
- saved = dict(
- (key, os.environ[key])
- for key in replacements
- if key in os.environ
- )
-
- # remove values that are null
- remove = (key for (key, value) in replacements.items() if value is None)
- for key in list(remove):
- os.environ.pop(key, None)
- replacements.pop(key)
-
- os.environ.update(replacements)
-
- try:
- yield saved
- finally:
- for key in replacements:
- os.environ.pop(key, None)
- os.environ.update(saved)
-
-
-@contextlib.contextmanager
-def quiet():
- """
- Redirect stdout/stderr to StringIO objects to prevent console output from
- distutils commands.
- """
-
- old_stdout = sys.stdout
- old_stderr = sys.stderr
- new_stdout = sys.stdout = six.StringIO()
- new_stderr = sys.stderr = six.StringIO()
- try:
- yield new_stdout, new_stderr
- finally:
- new_stdout.seek(0)
- new_stderr.seek(0)
- sys.stdout = old_stdout
- sys.stderr = old_stderr
-
-
-@contextlib.contextmanager
-def save_user_site_setting():
- saved = site.ENABLE_USER_SITE
- try:
- yield saved
- finally:
- site.ENABLE_USER_SITE = saved
-
-
-@contextlib.contextmanager
-def save_pkg_resources_state():
- pr_state = pkg_resources.__getstate__()
- # also save sys.path
- sys_path = sys.path[:]
- try:
- yield pr_state, sys_path
- finally:
- sys.path[:] = sys_path
- pkg_resources.__setstate__(pr_state)
-
-
-@contextlib.contextmanager
-def suppress_exceptions(*excs):
- try:
- yield
- except excs:
- pass
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
deleted file mode 100644
index a23c0504..00000000
--- a/setuptools/tests/environment.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import os
-import sys
-import unicodedata
-
-from subprocess import Popen as _Popen, PIPE as _PIPE
-
-
-def _which_dirs(cmd):
- result = set()
- for path in os.environ.get('PATH', '').split(os.pathsep):
- filename = os.path.join(path, cmd)
- if os.access(filename, os.X_OK):
- result.add(path)
- return result
-
-
-def run_setup_py(cmd, pypath=None, path=None,
- data_stream=0, env=None):
- """
- Execution command for tests, separate from those used by the
- code directly to prevent accidental behavior issues
- """
- if env is None:
- env = dict()
- for envname in os.environ:
- env[envname] = os.environ[envname]
-
- #override the python path if needed
- if pypath is not None:
- env["PYTHONPATH"] = pypath
-
- #overide the execution path if needed
- if path is not None:
- env["PATH"] = path
- if not env.get("PATH", ""):
- env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
- env["PATH"] = os.pathsep.join(env["PATH"])
-
- cmd = [sys.executable, "setup.py"] + list(cmd)
-
- # http://bugs.python.org/issue8557
- shell = sys.platform == 'win32'
-
- try:
- proc = _Popen(
- cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env,
- )
-
- data = proc.communicate()[data_stream]
- except OSError:
- return 1, ''
-
- #decode the console string if needed
- if hasattr(data, "decode"):
- # use the default encoding
- data = data.decode()
- data = unicodedata.normalize('NFC', data)
-
- #communciate calls wait()
- return proc.returncode, data
diff --git a/setuptools/tests/files.py b/setuptools/tests/files.py
deleted file mode 100644
index 4364241b..00000000
--- a/setuptools/tests/files.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-
-
-def build_files(file_defs, prefix=""):
- """
- Build a set of files/directories, as described by the file_defs dictionary.
-
- Each key/value pair in the dictionary is interpreted as a filename/contents
- pair. If the contents value is a dictionary, a directory is created, and the
- dictionary interpreted as the files within it, recursively.
-
- For example:
-
- {"README.txt": "A README file",
- "foo": {
- "__init__.py": "",
- "bar": {
- "__init__.py": "",
- },
- "baz.py": "# Some code",
- }
- }
- """
- for name, contents in file_defs.items():
- full_name = os.path.join(prefix, name)
- if isinstance(contents, dict):
- if not os.path.exists(full_name):
- os.makedirs(full_name)
- build_files(contents, prefix=full_name)
- else:
- with open(full_name, 'w') as f:
- f.write(contents)
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
deleted file mode 100644
index c70c38cb..00000000
--- a/setuptools/tests/fixtures.py
+++ /dev/null
@@ -1,27 +0,0 @@
-try:
- from unittest import mock
-except ImportError:
- import mock
-import pytest
-
-from . import contexts
-
-
-@pytest.yield_fixture
-def user_override():
- """
- Override site.USER_BASE and site.USER_SITE with temporary directories in
- a context.
- """
- with contexts.tempdir() as user_base:
- with mock.patch('site.USER_BASE', user_base):
- with contexts.tempdir() as user_site:
- with mock.patch('site.USER_SITE', user_site):
- with contexts.save_user_site_setting():
- yield
-
-
-@pytest.yield_fixture
-def tmpdir_cwd(tmpdir):
- with tmpdir.as_cwd() as orig:
- yield orig
diff --git a/setuptools/tests/indexes/test_links_priority/external.html b/setuptools/tests/indexes/test_links_priority/external.html
deleted file mode 100644
index 92e4702f..00000000
--- a/setuptools/tests/indexes/test_links_priority/external.html
+++ /dev/null
@@ -1,3 +0,0 @@
-<html><body>
-<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
-</body></html>
diff --git a/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
deleted file mode 100644
index fefb028b..00000000
--- a/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
+++ /dev/null
@@ -1,4 +0,0 @@
-<html><body>
-<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
-<a href="../../external.html" rel="homepage">external homepage</a><br/>
-</body></html>
diff --git a/setuptools/tests/py26compat.py b/setuptools/tests/py26compat.py
deleted file mode 100644
index c5680881..00000000
--- a/setuptools/tests/py26compat.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import sys
-import tarfile
-import contextlib
-
-def _tarfile_open_ex(*args, **kwargs):
- """
- Extend result as a context manager.
- """
- return contextlib.closing(tarfile.open(*args, **kwargs))
-
-if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2):
- tarfile_open = _tarfile_open_ex
-else:
- tarfile_open = tarfile.open
diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py
deleted file mode 100644
index 22dee0d2..00000000
--- a/setuptools/tests/script-with-bom.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# -*- coding: utf-8 -*-
-
-result = 'passed'
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
deleted file mode 100644
index 6a687937..00000000
--- a/setuptools/tests/server.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""Basic http server for tests to simulate PyPI or custom indexes
-"""
-
-import time
-import threading
-
-from setuptools.extern.six.moves import BaseHTTPServer, SimpleHTTPServer
-
-
-class IndexServer(BaseHTTPServer.HTTPServer):
- """Basic single-threaded http server simulating a package index
-
- You can use this server in unittest like this::
- s = IndexServer()
- s.start()
- index_url = s.base_url() + 'mytestindex'
- # do some test requests to the index
- # The index files should be located in setuptools/tests/indexes
- s.stop()
- """
- def __init__(self, server_address=('', 0),
- RequestHandlerClass=SimpleHTTPServer.SimpleHTTPRequestHandler):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
- self._run = True
-
- def start(self):
- self.thread = threading.Thread(target=self.serve_forever)
- self.thread.start()
-
- def stop(self):
- "Stop the server"
-
- # Let the server finish the last request and wait for a new one.
- time.sleep(0.1)
-
- self.shutdown()
- self.thread.join()
- self.socket.close()
-
- def base_url(self):
- port = self.server_port
- return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
-
-class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler):
- def do_GET(self):
- requests = vars(self.server).setdefault('requests', [])
- requests.append(self)
- self.send_response(200, 'OK')
-
-class MockServer(BaseHTTPServer.HTTPServer, threading.Thread):
- """
- A simple HTTP Server that records the requests made to it.
- """
- def __init__(self, server_address=('', 0),
- RequestHandlerClass=RequestRecorder):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
- threading.Thread.__init__(self)
- self.setDaemon(True)
- self.requests = []
-
- def run(self):
- self.serve_forever()
-
- @property
- def url(self):
- return 'http://localhost:%(server_port)s/' % vars(self)
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py
deleted file mode 100644
index ccfb2ea7..00000000
--- a/setuptools/tests/test_bdist_egg.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""develop tests
-"""
-import os
-import re
-
-import pytest
-
-from setuptools.dist import Distribution
-
-from . import contexts
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo', py_modules=['hi'])
-"""
-
-@pytest.yield_fixture
-def setup_context(tmpdir):
- with (tmpdir/'setup.py').open('w') as f:
- f.write(SETUP_PY)
- with (tmpdir/'hi.py').open('w') as f:
- f.write('1\n')
- with tmpdir.as_cwd():
- yield tmpdir
-
-
-class Test:
- def test_bdist_egg(self, setup_context, user_override):
- dist = Distribution(dict(
- script_name='setup.py',
- script_args=['bdist_egg'],
- name='foo',
- py_modules=['hi']
- ))
- os.makedirs(os.path.join('build', 'src'))
- with contexts.quiet():
- dist.parse_command_line()
- dist.run_commands()
-
- # let's see if we got our egg link at the right place
- [content] = os.listdir('dist')
- assert re.match('foo-0.0.0-py[23].\d.egg$', content)
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
deleted file mode 100644
index 0719ba44..00000000
--- a/setuptools/tests/test_build_ext.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import distutils.command.build_ext as orig
-
-from setuptools.command.build_ext import build_ext
-from setuptools.dist import Distribution
-
-class TestBuildExt:
- def test_get_ext_filename(self):
- """
- Setuptools needs to give back the same
- result as distutils, even if the fullname
- is not in ext_map.
- """
- dist = Distribution()
- cmd = build_ext(dist)
- cmd.ext_map['foo/bar'] = ''
- res = cmd.get_ext_filename('foo')
- wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
- assert res == wanted
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
deleted file mode 100644
index 1b844499..00000000
--- a/setuptools/tests/test_develop.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""develop tests
-"""
-import os
-import site
-import sys
-import io
-
-from setuptools.extern import six
-
-import pytest
-
-from setuptools.command.develop import develop
-from setuptools.dist import Distribution
-from . import contexts
-
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo',
- packages=['foo'],
- use_2to3=True,
-)
-"""
-
-INIT_PY = """print "foo"
-"""
-
-@pytest.yield_fixture
-def temp_user(monkeypatch):
- with contexts.tempdir() as user_base:
- with contexts.tempdir() as user_site:
- monkeypatch.setattr('site.USER_BASE', user_base)
- monkeypatch.setattr('site.USER_SITE', user_site)
- yield
-
-
-@pytest.yield_fixture
-def test_env(tmpdir, temp_user):
- target = tmpdir
- foo = target.mkdir('foo')
- setup = target / 'setup.py'
- if setup.isfile():
- raise ValueError(dir(target))
- with setup.open('w') as f:
- f.write(SETUP_PY)
- init = foo / '__init__.py'
- with init.open('w') as f:
- f.write(INIT_PY)
- with target.as_cwd():
- yield target
-
-
-class TestDevelop:
- in_virtualenv = hasattr(sys, 'real_prefix')
- in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
- @pytest.mark.skipif(in_virtualenv or in_venv,
- reason="Cannot run when invoked in a virtualenv or venv")
- def test_2to3_user_mode(self, test_env):
- settings = dict(
- name='foo',
- packages=['foo'],
- use_2to3=True,
- version='0.0',
- )
- dist = Distribution(settings)
- dist.script_name = 'setup.py'
- cmd = develop(dist)
- cmd.user = 1
- cmd.ensure_finalized()
- cmd.install_dir = site.USER_SITE
- cmd.user = 1
- with contexts.quiet():
- cmd.run()
-
- # let's see if we got our egg link at the right place
- content = os.listdir(site.USER_SITE)
- content.sort()
- assert content == ['easy-install.pth', 'foo.egg-link']
-
- # Check that we are using the right code.
- fn = os.path.join(site.USER_SITE, 'foo.egg-link')
- with io.open(fn) as egg_link_file:
- path = egg_link_file.read().split()[0].strip()
- fn = os.path.join(path, 'foo', '__init__.py')
- with io.open(fn) as init_file:
- init = init_file.read().strip()
-
- expected = 'print("foo")' if six.PY3 else 'print "foo"'
- assert init == expected
-
- def test_console_scripts(self, tmpdir):
- """
- Test that console scripts are installed and that they reference
- only the project by name and not the current version.
- """
- pytest.skip("TODO: needs a fixture to cause 'develop' "
- "to be invoked without mutating environment.")
- settings = dict(
- name='foo',
- packages=['foo'],
- version='0.0',
- entry_points={
- 'console_scripts': [
- 'foocmd = foo:foo',
- ],
- },
- )
- dist = Distribution(settings)
- dist.script_name = 'setup.py'
- cmd = develop(dist)
- cmd.ensure_finalized()
- cmd.install_dir = tmpdir
- cmd.run()
- #assert '0.0' not in foocmd_text
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
deleted file mode 100644
index 9f226a55..00000000
--- a/setuptools/tests/test_dist_info.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""Test .dist-info style distributions.
-"""
-import os
-import shutil
-import tempfile
-
-from setuptools.extern.six.moves import map
-
-import pytest
-
-import pkg_resources
-from .textwrap import DALS
-
-
-class TestDistInfo:
-
- def test_distinfo(self):
- dists = dict(
- (d.project_name, d)
- for d in pkg_resources.find_distributions(self.tmpdir)
- )
-
- assert len(dists) == 2, dists
-
- unversioned = dists['UnversionedDistribution']
- versioned = dists['VersionedDistribution']
-
- assert versioned.version == '2.718' # from filename
- assert unversioned.version == '0.3' # from METADATA
-
- def test_conditional_dependencies(self):
- specs = 'splort==4', 'quux>=1.1'
- requires = list(map(pkg_resources.Requirement.parse, specs))
-
- for d in pkg_resources.find_distributions(self.tmpdir):
- assert d.requires() == requires[:1]
- assert d.requires(extras=('baz',)) == [
- requires[0],
- pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"')]
- assert d.extras == ['baz']
-
- metadata_template = DALS("""
- Metadata-Version: 1.2
- Name: {name}
- {version}
- Requires-Dist: splort (==4)
- Provides-Extra: baz
- Requires-Dist: quux (>=1.1); extra == 'baz'
- """)
-
- def setup_method(self, method):
- self.tmpdir = tempfile.mkdtemp()
- dist_info_name = 'VersionedDistribution-2.718.dist-info'
- versioned = os.path.join(self.tmpdir, dist_info_name)
- os.mkdir(versioned)
- with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file:
- metadata = self.metadata_template.format(
- name='VersionedDistribution',
- version='',
- ).replace('\n\n', '\n')
- metadata_file.write(metadata)
- dist_info_name = 'UnversionedDistribution.dist-info'
- unversioned = os.path.join(self.tmpdir, dist_info_name)
- os.mkdir(unversioned)
- with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file:
- metadata = self.metadata_template.format(
- name='UnversionedDistribution',
- version='Version: 0.3',
- )
- metadata_file.write(metadata)
-
- def teardown_method(self, method):
- shutil.rmtree(self.tmpdir)
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
deleted file mode 100644
index 55b8b05a..00000000
--- a/setuptools/tests/test_easy_install.py
+++ /dev/null
@@ -1,599 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""Easy install Tests
-"""
-from __future__ import absolute_import
-
-import sys
-import os
-import shutil
-import tempfile
-import site
-import contextlib
-import tarfile
-import logging
-import itertools
-import distutils.errors
-import io
-
-from setuptools.extern.six.moves import urllib
-import time
-
-import pytest
-try:
- from unittest import mock
-except ImportError:
- import mock
-
-from setuptools import sandbox
-from setuptools.sandbox import run_setup
-import setuptools.command.easy_install as ei
-from setuptools.command.easy_install import PthDistributions
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-from pkg_resources import working_set
-from pkg_resources import Distribution as PRDistribution
-import setuptools.tests.server
-import pkg_resources
-
-from .py26compat import tarfile_open
-from . import contexts
-from .textwrap import DALS
-
-
-class FakeDist(object):
- def get_entry_map(self, group):
- if group != 'console_scripts':
- return {}
- return {'name': 'ep'}
-
- def as_requirement(self):
- return 'spec'
-
-SETUP_PY = DALS("""
- from setuptools import setup
-
- setup(name='foo')
- """)
-
-class TestEasyInstallTest:
-
- def test_install_site_py(self, tmpdir):
- dist = Distribution()
- cmd = ei.easy_install(dist)
- cmd.sitepy_installed = False
- cmd.install_dir = str(tmpdir)
- cmd.install_site_py()
- assert (tmpdir / 'site.py').exists()
-
- def test_get_script_args(self):
- header = ei.CommandSpec.best().from_environment().as_header()
- expected = header + DALS("""
- # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
- __requires__ = 'spec'
- import sys
- from pkg_resources import load_entry_point
-
- if __name__ == '__main__':
- sys.exit(
- load_entry_point('spec', 'console_scripts', 'name')()
- )
- """)
- dist = FakeDist()
-
- args = next(ei.ScriptWriter.get_args(dist))
- name, script = itertools.islice(args, 2)
-
- assert script == expected
-
- def test_no_find_links(self):
- # new option '--no-find-links', that blocks find-links added at
- # the project level
- dist = Distribution()
- cmd = ei.easy_install(dist)
- cmd.check_pth_processing = lambda: True
- cmd.no_find_links = True
- cmd.find_links = ['link1', 'link2']
- cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
- cmd.args = ['ok']
- cmd.ensure_finalized()
- assert cmd.package_index.scanned_urls == {}
-
- # let's try without it (default behavior)
- cmd = ei.easy_install(dist)
- cmd.check_pth_processing = lambda: True
- cmd.find_links = ['link1', 'link2']
- cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
- cmd.args = ['ok']
- cmd.ensure_finalized()
- keys = sorted(cmd.package_index.scanned_urls.keys())
- assert keys == ['link1', 'link2']
-
- def test_write_exception(self):
- """
- Test that `cant_write_to_target` is rendered as a DistutilsError.
- """
- dist = Distribution()
- cmd = ei.easy_install(dist)
- cmd.install_dir = os.getcwd()
- with pytest.raises(distutils.errors.DistutilsError):
- cmd.cant_write_to_target()
-
-
-class TestPTHFileWriter:
- def test_add_from_cwd_site_sets_dirty(self):
- '''a pth file manager should set dirty
- if a distribution is in site but also the cwd
- '''
- pth = PthDistributions('does-not_exist', [os.getcwd()])
- assert not pth.dirty
- pth.add(PRDistribution(os.getcwd()))
- assert pth.dirty
-
- def test_add_from_site_is_ignored(self):
- location = '/test/location/does-not-have-to-exist'
- # PthDistributions expects all locations to be normalized
- location = pkg_resources.normalize_path(location)
- pth = PthDistributions('does-not_exist', [location, ])
- assert not pth.dirty
- pth.add(PRDistribution(location))
- assert not pth.dirty
-
-
-@pytest.yield_fixture
-def setup_context(tmpdir):
- with (tmpdir/'setup.py').open('w') as f:
- f.write(SETUP_PY)
- with tmpdir.as_cwd():
- yield tmpdir
-
-
-@pytest.mark.usefixtures("user_override")
-@pytest.mark.usefixtures("setup_context")
-class TestUserInstallTest:
-
- # prevent check that site-packages is writable. easy_install
- # shouldn't be writing to system site-packages during finalize
- # options, but while it does, bypass the behavior.
- prev_sp_write = mock.patch(
- 'setuptools.command.easy_install.easy_install.check_site_dir',
- mock.Mock(),
- )
-
- # simulate setuptools installed in user site packages
- @mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE)
- @mock.patch('site.ENABLE_USER_SITE', True)
- @prev_sp_write
- def test_user_install_not_implied_user_site_enabled(self):
- self.assert_not_user_site()
-
- @mock.patch('site.ENABLE_USER_SITE', False)
- @prev_sp_write
- def test_user_install_not_implied_user_site_disabled(self):
- self.assert_not_user_site()
-
- @staticmethod
- def assert_not_user_site():
- # create a finalized easy_install command
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = ei.easy_install(dist)
- cmd.args = ['py']
- cmd.ensure_finalized()
- assert not cmd.user, 'user should not be implied'
-
- def test_multiproc_atexit(self):
- pytest.importorskip('multiprocessing')
-
- log = logging.getLogger('test_easy_install')
- logging.basicConfig(level=logging.INFO, stream=sys.stderr)
- log.info('this should not break')
-
- @pytest.fixture()
- def foo_package(self, tmpdir):
- egg_file = tmpdir / 'foo-1.0.egg-info'
- with egg_file.open('w') as f:
- f.write('Name: foo\n')
- return str(tmpdir)
-
- @pytest.yield_fixture()
- def install_target(self, tmpdir):
- target = str(tmpdir)
- with mock.patch('sys.path', sys.path + [target]):
- python_path = os.path.pathsep.join(sys.path)
- with mock.patch.dict(os.environ, PYTHONPATH=python_path):
- yield target
-
- def test_local_index(self, foo_package, install_target):
- """
- The local index must be used when easy_install locates installed
- packages.
- """
- dist = Distribution()
- dist.script_name = 'setup.py'
- cmd = ei.easy_install(dist)
- cmd.install_dir = install_target
- cmd.args = ['foo']
- cmd.ensure_finalized()
- cmd.local_index.scan([foo_package])
- res = cmd.easy_install('foo')
- actual = os.path.normcase(os.path.realpath(res.location))
- expected = os.path.normcase(os.path.realpath(foo_package))
- assert actual == expected
-
- @contextlib.contextmanager
- def user_install_setup_context(self, *args, **kwargs):
- """
- Wrap sandbox.setup_context to patch easy_install in that context to
- appear as user-installed.
- """
- with self.orig_context(*args, **kwargs):
- import setuptools.command.easy_install as ei
- ei.__file__ = site.USER_SITE
- yield
-
- def patched_setup_context(self):
- self.orig_context = sandbox.setup_context
-
- return mock.patch(
- 'setuptools.sandbox.setup_context',
- self.user_install_setup_context,
- )
-
-
-@pytest.yield_fixture
-def distutils_package():
- distutils_setup_py = SETUP_PY.replace(
- 'from setuptools import setup',
- 'from distutils.core import setup',
- )
- with contexts.tempdir(cd=os.chdir):
- with open('setup.py', 'w') as f:
- f.write(distutils_setup_py)
- yield
-
-
-class TestDistutilsPackage:
- def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
- run_setup('setup.py', ['bdist_egg'])
-
-
-class TestSetupRequires:
-
- def test_setup_requires_honors_fetch_params(self):
- """
- When easy_install installs a source distribution which specifies
- setup_requires, it should honor the fetch parameters (such as
- allow-hosts, index-url, and find-links).
- """
- # set up a server which will simulate an alternate package index.
- p_index = setuptools.tests.server.MockServer()
- p_index.start()
- netloc = 1
- p_index_loc = urllib.parse.urlparse(p_index.url)[netloc]
- if p_index_loc.endswith(':0'):
- # Some platforms (Jython) don't find a port to which to bind,
- # so skip this test for them.
- return
- with contexts.quiet():
- # create an sdist that has a build-time dependency.
- with TestSetupRequires.create_sdist() as dist_file:
- with contexts.tempdir() as temp_install_dir:
- with contexts.environment(PYTHONPATH=temp_install_dir):
- ei_params = [
- '--index-url', p_index.url,
- '--allow-hosts', p_index_loc,
- '--exclude-scripts',
- '--install-dir', temp_install_dir,
- dist_file,
- ]
- with sandbox.save_argv(['easy_install']):
- # attempt to install the dist. It should fail because
- # it doesn't exist.
- with pytest.raises(SystemExit):
- easy_install_pkg.main(ei_params)
- # there should have been two or three requests to the server
- # (three happens on Python 3.3a)
- assert 2 <= len(p_index.requests) <= 3
- assert p_index.requests[0].path == '/does-not-exist/'
-
- @staticmethod
- @contextlib.contextmanager
- def create_sdist():
- """
- Return an sdist with a setup_requires dependency (of something that
- doesn't exist)
- """
- with contexts.tempdir() as dir:
- dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
- make_sdist(dist_path, [
- ('setup.py', DALS("""
- import setuptools
- setuptools.setup(
- name="setuptools-test-fetcher",
- version="1.0",
- setup_requires = ['does-not-exist'],
- )
- """))])
- yield dist_path
-
- def test_setup_requires_overrides_version_conflict(self):
- """
- Regression test for distribution issue 323:
- https://bitbucket.org/tarek/distribute/issues/323
-
- Ensures that a distribution's setup_requires requirements can still be
- installed and used locally even if a conflicting version of that
- requirement is already on the path.
- """
-
- fake_dist = PRDistribution('does-not-matter', project_name='foobar',
- version='0.0')
- working_set.add(fake_dist)
-
- with contexts.save_pkg_resources_state():
- with contexts.tempdir() as temp_dir:
- test_pkg = create_setup_requires_package(temp_dir)
- test_setup_py = os.path.join(test_pkg, 'setup.py')
- with contexts.quiet() as (stdout, stderr):
- # Don't even need to install the package, just
- # running the setup.py at all is sufficient
- run_setup(test_setup_py, ['--name'])
-
- lines = stdout.readlines()
- assert len(lines) > 0
- assert lines[-1].strip(), 'test_pkg'
-
- def test_setup_requires_override_nspkg(self):
- """
- Like ``test_setup_requires_overrides_version_conflict`` but where the
- ``setup_requires`` package is part of a namespace package that has
- *already* been imported.
- """
-
- with contexts.save_pkg_resources_state():
- with contexts.tempdir() as temp_dir:
- foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
- make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
- # Now actually go ahead an extract to the temp dir and add the
- # extracted path to sys.path so foo.bar v0.1 is importable
- foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
- os.mkdir(foobar_1_dir)
- with tarfile_open(foobar_1_archive) as tf:
- tf.extractall(foobar_1_dir)
- sys.path.insert(1, foobar_1_dir)
-
- dist = PRDistribution(foobar_1_dir, project_name='foo.bar',
- version='0.1')
- working_set.add(dist)
-
- template = DALS("""\
- import foo # Even with foo imported first the
- # setup_requires package should override
- import setuptools
- setuptools.setup(**%r)
-
- if not (hasattr(foo, '__path__') and
- len(foo.__path__) == 2):
- print('FAIL')
-
- if 'foo.bar-0.2' not in foo.__path__[0]:
- print('FAIL')
- """)
-
- test_pkg = create_setup_requires_package(
- temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template)
-
- test_setup_py = os.path.join(test_pkg, 'setup.py')
-
- with contexts.quiet() as (stdout, stderr):
- try:
- # Don't even need to install the package, just
- # running the setup.py at all is sufficient
- run_setup(test_setup_py, ['--name'])
- except pkg_resources.VersionConflict:
- self.fail('Installing setup.py requirements '
- 'caused a VersionConflict')
-
- assert 'FAIL' not in stdout.getvalue()
- lines = stdout.readlines()
- assert len(lines) > 0
- assert lines[-1].strip() == 'test_pkg'
-
-
-def make_trivial_sdist(dist_path, distname, version):
- """
- Create a simple sdist tarball at dist_path, containing just a simple
- setup.py.
- """
-
- make_sdist(dist_path, [
- ('setup.py',
- DALS("""\
- import setuptools
- setuptools.setup(
- name=%r,
- version=%r
- )
- """ % (distname, version)))])
-
-
-def make_nspkg_sdist(dist_path, distname, version):
- """
- Make an sdist tarball with distname and version which also contains one
- package with the same name as distname. The top-level package is
- designated a namespace package).
- """
-
- parts = distname.split('.')
- nspackage = parts[0]
-
- packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
-
- setup_py = DALS("""\
- import setuptools
- setuptools.setup(
- name=%r,
- version=%r,
- packages=%r,
- namespace_packages=[%r]
- )
- """ % (distname, version, packages, nspackage))
-
- init = "__import__('pkg_resources').declare_namespace(__name__)"
-
- files = [('setup.py', setup_py),
- (os.path.join(nspackage, '__init__.py'), init)]
- for package in packages[1:]:
- filename = os.path.join(*(package.split('.') + ['__init__.py']))
- files.append((filename, ''))
-
- make_sdist(dist_path, files)
-
-
-def make_sdist(dist_path, files):
- """
- Create a simple sdist tarball at dist_path, containing the files
- listed in ``files`` as ``(filename, content)`` tuples.
- """
-
- with tarfile_open(dist_path, 'w:gz') as dist:
- for filename, content in files:
- file_bytes = io.BytesIO(content.encode('utf-8'))
- file_info = tarfile.TarInfo(name=filename)
- file_info.size = len(file_bytes.getvalue())
- file_info.mtime = int(time.time())
- dist.addfile(file_info, fileobj=file_bytes)
-
-
-def create_setup_requires_package(path, distname='foobar', version='0.1',
- make_package=make_trivial_sdist,
- setup_py_template=None):
- """Creates a source tree under path for a trivial test package that has a
- single requirement in setup_requires--a tarball for that requirement is
- also created and added to the dependency_links argument.
-
- ``distname`` and ``version`` refer to the name/version of the package that
- the test package requires via ``setup_requires``. The name of the test
- package itself is just 'test_pkg'.
- """
-
- test_setup_attrs = {
- 'name': 'test_pkg', 'version': '0.0',
- 'setup_requires': ['%s==%s' % (distname, version)],
- 'dependency_links': [os.path.abspath(path)]
- }
-
- test_pkg = os.path.join(path, 'test_pkg')
- test_setup_py = os.path.join(test_pkg, 'setup.py')
- os.mkdir(test_pkg)
-
- if setup_py_template is None:
- setup_py_template = DALS("""\
- import setuptools
- setuptools.setup(**%r)
- """)
-
- with open(test_setup_py, 'w') as f:
- f.write(setup_py_template % test_setup_attrs)
-
- foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
- make_package(foobar_path, distname, version)
-
- return test_pkg
-
-
-def make_trivial_sdist(dist_path, setup_py):
- """Create a simple sdist tarball at dist_path, containing just a
- setup.py, the contents of which are provided by the setup_py string.
- """
-
- setup_py_file = tarfile.TarInfo(name='setup.py')
- setup_py_bytes = io.BytesIO(setup_py.encode('utf-8'))
- setup_py_file.size = len(setup_py_bytes.getvalue())
- with tarfile_open(dist_path, 'w:gz') as dist:
- dist.addfile(setup_py_file, fileobj=setup_py_bytes)
-
-
-class TestScriptHeader:
- non_ascii_exe = '/Users/José/bin/python'
- exe_with_spaces = r'C:\Program Files\Python33\python.exe'
-
- @pytest.mark.skipif(
- sys.platform.startswith('java') and ei.is_sh(sys.executable),
- reason="Test cannot run under java when executable is sh"
- )
- def test_get_script_header(self):
- expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
- actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python')
- assert actual == expected
-
- expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath
- (sys.executable))
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x')
- assert actual == expected
-
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
- executable=self.non_ascii_exe)
- expected = '#!%s -x\n' % self.non_ascii_exe
- assert actual == expected
-
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
- executable='"'+self.exe_with_spaces+'"')
- expected = '#!"%s"\n' % self.exe_with_spaces
- assert actual == expected
-
-
-class TestCommandSpec:
- def test_custom_launch_command(self):
- """
- Show how a custom CommandSpec could be used to specify a #! executable
- which takes parameters.
- """
- cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
- assert cmd.as_header() == '#!/usr/bin/env python3\n'
-
- def test_from_param_for_CommandSpec_is_passthrough(self):
- """
- from_param should return an instance of a CommandSpec
- """
- cmd = ei.CommandSpec(['python'])
- cmd_new = ei.CommandSpec.from_param(cmd)
- assert cmd is cmd_new
-
- @mock.patch('sys.executable', TestScriptHeader.exe_with_spaces)
- @mock.patch.dict(os.environ)
- def test_from_environment_with_spaces_in_executable(self):
- os.environ.pop('__PYVENV_LAUNCHER__', None)
- cmd = ei.CommandSpec.from_environment()
- assert len(cmd) == 1
- assert cmd.as_header().startswith('#!"')
-
- def test_from_simple_string_uses_shlex(self):
- """
- In order to support `executable = /usr/bin/env my-python`, make sure
- from_param invokes shlex on that input.
- """
- cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
- assert len(cmd) == 2
- assert '"' not in cmd.as_header()
-
- def test_sys_executable(self):
- """
- CommandSpec.from_string(sys.executable) should contain just that param.
- """
- writer = ei.ScriptWriter.best()
- cmd = writer.command_spec_class.from_string(sys.executable)
- assert len(cmd) == 1
- assert cmd[0] == sys.executable
-
-
-class TestWindowsScriptWriter:
- def test_header(self):
- hdr = ei.WindowsScriptWriter.get_script_header('')
- assert hdr.startswith('#!')
- assert hdr.endswith('\n')
- hdr = hdr.lstrip('#!')
- hdr = hdr.rstrip('\n')
- # header should not start with an escaped quote
- assert not hdr.startswith('\\"')
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
deleted file mode 100644
index 3a0db58f..00000000
--- a/setuptools/tests/test_egg_info.py
+++ /dev/null
@@ -1,249 +0,0 @@
-import os
-import glob
-import re
-import stat
-import sys
-
-from setuptools.command.egg_info import egg_info
-from setuptools.dist import Distribution
-from setuptools.extern.six.moves import map
-
-import pytest
-
-from . import environment
-from .files import build_files
-from .textwrap import DALS
-from . import contexts
-
-
-class Environment(str):
- pass
-
-
-class TestEggInfo(object):
-
- setup_script = DALS("""
- from setuptools import setup
-
- setup(
- name='foo',
- py_modules=['hello'],
- entry_points={'console_scripts': ['hi = hello.run']},
- zip_safe=False,
- )
- """)
-
- def _create_project(self):
- build_files({
- 'setup.py': self.setup_script,
- 'hello.py': DALS("""
- def run():
- print('hello')
- """)
- })
-
- @pytest.yield_fixture
- def env(self):
- with contexts.tempdir(prefix='setuptools-test.') as env_dir:
- env = Environment(env_dir)
- os.chmod(env_dir, stat.S_IRWXU)
- subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
- env.paths = dict(
- (dirname, os.path.join(env_dir, dirname))
- for dirname in subs
- )
- list(map(os.mkdir, env.paths.values()))
- build_files({
- env.paths['home']: {
- '.pydistutils.cfg': DALS("""
- [egg_info]
- egg-base = %(egg-base)s
- """ % env.paths)
- }
- })
- yield env
-
- def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env):
- """
- When the egg_info section is empty or not present, running
- save_version_info should add the settings to the setup.cfg
- in a deterministic order, consistent with the ordering found
- on Python 2.6 and 2.7 with PYTHONHASHSEED=0.
- """
- setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
- dist = Distribution()
- ei = egg_info(dist)
- ei.initialize_options()
- ei.save_version_info(setup_cfg)
-
- with open(setup_cfg, 'r') as f:
- content = f.read()
-
- assert '[egg_info]' in content
- assert 'tag_build =' in content
- assert 'tag_date = 0' in content
- assert 'tag_svn_revision = 0' in content
-
- expected_order = 'tag_build', 'tag_date', 'tag_svn_revision'
-
- self._validate_content_order(content, expected_order)
-
- @staticmethod
- def _validate_content_order(content, expected):
- """
- Assert that the strings in expected appear in content
- in order.
- """
- if sys.version_info < (2, 7):
- # On Python 2.6, expect dict key order.
- expected = dict.fromkeys(expected).keys()
-
- pattern = '.*'.join(expected)
- flags = re.MULTILINE | re.DOTALL
- assert re.search(pattern, content, flags)
-
- def test_egg_info_save_version_info_setup_defaults(self, tmpdir_cwd, env):
- """
- When running save_version_info on an existing setup.cfg
- with the 'default' values present from a previous run,
- the file should remain unchanged, except on Python 2.6,
- where the order of the keys will be changed to match the
- order as found in a dictionary of those keys.
- """
- setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
- build_files({
- setup_cfg: DALS("""
- [egg_info]
- tag_build =
- tag_date = 0
- tag_svn_revision = 0
- """),
- })
- dist = Distribution()
- ei = egg_info(dist)
- ei.initialize_options()
- ei.save_version_info(setup_cfg)
-
- with open(setup_cfg, 'r') as f:
- content = f.read()
-
- assert '[egg_info]' in content
- assert 'tag_build =' in content
- assert 'tag_date = 0' in content
- assert 'tag_svn_revision = 0' in content
-
- expected_order = 'tag_build', 'tag_date', 'tag_svn_revision'
-
- self._validate_content_order(content, expected_order)
-
- def test_egg_base_installed_egg_info(self, tmpdir_cwd, env):
- self._create_project()
-
- self._run_install_command(tmpdir_cwd, env)
- actual = self._find_egg_info_files(env.paths['lib'])
-
- expected = [
- 'PKG-INFO',
- 'SOURCES.txt',
- 'dependency_links.txt',
- 'entry_points.txt',
- 'not-zip-safe',
- 'top_level.txt',
- ]
- assert sorted(actual) == expected
-
- def test_manifest_template_is_read(self, tmpdir_cwd, env):
- self._create_project()
- build_files({
- 'MANIFEST.in': DALS("""
- recursive-include docs *.rst
- """),
- 'docs': {
- 'usage.rst': "Run 'hi'",
- }
- })
- self._run_install_command(tmpdir_cwd, env)
- egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
- sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt')
- assert 'docs/usage.rst' in open(sources_txt).read().split('\n')
-
- def _setup_script_with_requires(self, requires_line):
- setup_script = DALS("""
- from setuptools import setup
-
- setup(
- name='foo',
- %s
- zip_safe=False,
- )
- """ % requires_line)
- build_files({
- 'setup.py': setup_script,
- })
-
- def test_install_requires_with_markers(self, tmpdir_cwd, env):
- self._setup_script_with_requires(
- """install_requires=["barbazquux;python_version<'2'"],""")
- self._run_install_command(tmpdir_cwd, env)
- egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
- requires_txt = os.path.join(egg_info_dir, 'requires.txt')
- assert "barbazquux;python_version<'2'" in open(
- requires_txt).read().split('\n')
- assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
-
- def test_setup_requires_with_markers(self, tmpdir_cwd, env):
- self._setup_script_with_requires(
- """setup_requires=["barbazquux;python_version<'2'"],""")
- self._run_install_command(tmpdir_cwd, env)
- assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
-
- def test_tests_require_with_markers(self, tmpdir_cwd, env):
- self._setup_script_with_requires(
- """tests_require=["barbazquux;python_version<'2'"],""")
- self._run_install_command(
- tmpdir_cwd, env, cmd=['test'], output="Ran 0 tests in")
- assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
-
- def test_extra_requires_with_markers(self, tmpdir_cwd, env):
- self._setup_script_with_requires(
- """extra_requires={":python_version<'2'": ["barbazquux"]},""")
- self._run_install_command(tmpdir_cwd, env)
- assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
-
- def _run_install_command(self, tmpdir_cwd, env, cmd=None, output=None):
- environ = os.environ.copy().update(
- HOME=env.paths['home'],
- )
- if cmd is None:
- cmd = [
- 'install',
- '--home', env.paths['home'],
- '--install-lib', env.paths['lib'],
- '--install-scripts', env.paths['scripts'],
- '--install-data', env.paths['data'],
- ]
- code, data = environment.run_setup_py(
- cmd=cmd,
- pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
- data_stream=1,
- env=environ,
- )
- if code:
- raise AssertionError(data)
- if output:
- assert output in data
-
- def _find_egg_info_files(self, root):
- class DirList(list):
- def __init__(self, files, base):
- super(DirList, self).__init__(files)
- self.base = base
-
- results = (
- DirList(filenames, dirpath)
- for dirpath, dirnames, filenames in os.walk(root)
- if os.path.basename(dirpath) == 'EGG-INFO'
- )
- # expect exactly one result
- result, = results
- return result
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
deleted file mode 100644
index 06a7c02e..00000000
--- a/setuptools/tests/test_find_packages.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""Tests for setuptools.find_packages()."""
-import os
-import sys
-import shutil
-import tempfile
-import platform
-
-import pytest
-
-import setuptools
-from setuptools import find_packages
-
-find_420_packages = setuptools.PEP420PackageFinder.find
-
-# modeled after CPython's test.support.can_symlink
-def can_symlink():
- TESTFN = tempfile.mktemp()
- symlink_path = TESTFN + "can_symlink"
- try:
- os.symlink(TESTFN, symlink_path)
- can = True
- except (OSError, NotImplementedError, AttributeError):
- can = False
- else:
- os.remove(symlink_path)
- globals().update(can_symlink=lambda: can)
- return can
-
-def has_symlink():
- bad_symlink = (
- # Windows symlink directory detection is broken on Python 3.2
- platform.system() == 'Windows' and sys.version_info[:2] == (3,2)
- )
- return can_symlink() and not bad_symlink
-
-class TestFindPackages:
-
- def setup_method(self, method):
- self.dist_dir = tempfile.mkdtemp()
- self._make_pkg_structure()
-
- def teardown_method(self, method):
- shutil.rmtree(self.dist_dir)
-
- def _make_pkg_structure(self):
- """Make basic package structure.
-
- dist/
- docs/
- conf.py
- pkg/
- __pycache__/
- nspkg/
- mod.py
- subpkg/
- assets/
- asset
- __init__.py
- setup.py
-
- """
- self.docs_dir = self._mkdir('docs', self.dist_dir)
- self._touch('conf.py', self.docs_dir)
- self.pkg_dir = self._mkdir('pkg', self.dist_dir)
- self._mkdir('__pycache__', self.pkg_dir)
- self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
- self._touch('mod.py', self.ns_pkg_dir)
- self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
- self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
- self._touch('asset', self.asset_dir)
- self._touch('__init__.py', self.sub_pkg_dir)
- self._touch('setup.py', self.dist_dir)
-
- def _mkdir(self, path, parent_dir=None):
- if parent_dir:
- path = os.path.join(parent_dir, path)
- os.mkdir(path)
- return path
-
- def _touch(self, path, dir_=None):
- if dir_:
- path = os.path.join(dir_, path)
- fp = open(path, 'w')
- fp.close()
- return path
-
- def test_regular_package(self):
- self._touch('__init__.py', self.pkg_dir)
- packages = find_packages(self.dist_dir)
- assert packages == ['pkg', 'pkg.subpkg']
-
- def test_exclude(self):
- self._touch('__init__.py', self.pkg_dir)
- packages = find_packages(self.dist_dir, exclude=('pkg.*',))
- assert packages == ['pkg']
-
- def test_include_excludes_other(self):
- """
- If include is specified, other packages should be excluded.
- """
- self._touch('__init__.py', self.pkg_dir)
- alt_dir = self._mkdir('other_pkg', self.dist_dir)
- self._touch('__init__.py', alt_dir)
- packages = find_packages(self.dist_dir, include=['other_pkg'])
- assert packages == ['other_pkg']
-
- def test_dir_with_dot_is_skipped(self):
- shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
- data_dir = self._mkdir('some.data', self.pkg_dir)
- self._touch('__init__.py', data_dir)
- self._touch('file.dat', data_dir)
- packages = find_packages(self.dist_dir)
- assert 'pkg.some.data' not in packages
-
- def test_dir_with_packages_in_subdir_is_excluded(self):
- """
- Ensure that a package in a non-package such as build/pkg/__init__.py
- is excluded.
- """
- build_dir = self._mkdir('build', self.dist_dir)
- build_pkg_dir = self._mkdir('pkg', build_dir)
- self._touch('__init__.py', build_pkg_dir)
- packages = find_packages(self.dist_dir)
- assert 'build.pkg' not in packages
-
- @pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
- def test_symlinked_packages_are_included(self):
- """
- A symbolically-linked directory should be treated like any other
- directory when matched as a package.
-
- Create a link from lpkg -> pkg.
- """
- self._touch('__init__.py', self.pkg_dir)
- linked_pkg = os.path.join(self.dist_dir, 'lpkg')
- os.symlink('pkg', linked_pkg)
- assert os.path.isdir(linked_pkg)
- packages = find_packages(self.dist_dir)
- assert 'lpkg' in packages
-
- def _assert_packages(self, actual, expected):
- assert set(actual) == set(expected)
-
- def test_pep420_ns_package(self):
- packages = find_420_packages(
- self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
- self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
-
- def test_pep420_ns_package_no_includes(self):
- packages = find_420_packages(
- self.dist_dir, exclude=['pkg.subpkg.assets'])
- self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
-
- def test_pep420_ns_package_no_includes_or_excludes(self):
- packages = find_420_packages(self.dist_dir)
- expected = [
- 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
- self._assert_packages(packages, expected)
-
- def test_regular_package_with_nested_pep420_ns_packages(self):
- self._touch('__init__.py', self.pkg_dir)
- packages = find_420_packages(
- self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
- self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
-
- def test_pep420_ns_package_no_non_package_dirs(self):
- shutil.rmtree(self.docs_dir)
- shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
- packages = find_420_packages(self.dist_dir)
- self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
deleted file mode 100644
index 04772ba5..00000000
--- a/setuptools/tests/test_integration.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""Run some integration tests.
-
-Try to install a few packages.
-"""
-
-import glob
-import os
-import sys
-
-from setuptools.extern.six.moves import urllib
-import pytest
-
-from setuptools.command.easy_install import easy_install
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-
-def setup_module(module):
- packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
- for pkg in packages:
- try:
- __import__(pkg)
- tmpl = "Integration tests cannot run when {pkg} is installed"
- pytest.skip(tmpl.format(**locals()))
- except ImportError:
- pass
-
- try:
- urllib.request.urlopen('https://pypi.python.org/pypi')
- except Exception as exc:
- pytest.skip(str(exc))
-
-
-@pytest.fixture
-def install_context(request, tmpdir, monkeypatch):
- """Fixture to set up temporary installation directory.
- """
- # Save old values so we can restore them.
- new_cwd = tmpdir.mkdir('cwd')
- user_base = tmpdir.mkdir('user_base')
- user_site = tmpdir.mkdir('user_site')
- install_dir = tmpdir.mkdir('install_dir')
-
- def fin():
- # undo the monkeypatch, particularly needed under
- # windows because of kept handle on cwd
- monkeypatch.undo()
- new_cwd.remove()
- user_base.remove()
- user_site.remove()
- install_dir.remove()
- request.addfinalizer(fin)
-
- # Change the environment and site settings to control where the
- # files are installed and ensure we do not overwrite anything.
- monkeypatch.chdir(new_cwd)
- monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
- monkeypatch.setattr('site.USER_BASE', user_base.strpath)
- monkeypatch.setattr('site.USER_SITE', user_site.strpath)
- monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
- monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
-
- # Set up the command for performing the installation.
- dist = Distribution()
- cmd = easy_install(dist)
- cmd.install_dir = install_dir.strpath
- return cmd
-
-
-def _install_one(requirement, cmd, pkgname, modulename):
- cmd.args = [requirement]
- cmd.ensure_finalized()
- cmd.run()
- target = cmd.install_dir
- dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
- assert dest_path
- assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
-
-
-def test_stevedore(install_context):
- _install_one('stevedore', install_context,
- 'stevedore', 'extension.py')
-
-
-@pytest.mark.xfail
-def test_virtualenvwrapper(install_context):
- _install_one('virtualenvwrapper', install_context,
- 'virtualenvwrapper', 'hook_loader.py')
-
-
-def test_pbr(install_context):
- _install_one('pbr', install_context,
- 'pbr', 'core.py')
-
-
-@pytest.mark.xfail
-def test_python_novaclient(install_context):
- _install_one('python-novaclient', install_context,
- 'novaclient', 'base.py')
diff --git a/setuptools/tests/test_msvc9compiler.py b/setuptools/tests/test_msvc9compiler.py
deleted file mode 100644
index 09e0460c..00000000
--- a/setuptools/tests/test_msvc9compiler.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""
-Tests for msvc9compiler.
-"""
-
-import os
-import contextlib
-import distutils.errors
-
-import pytest
-try:
- from unittest import mock
-except ImportError:
- import mock
-
-from . import contexts
-
-# importing only setuptools should apply the patch
-__import__('setuptools')
-
-pytest.importorskip("distutils.msvc9compiler")
-
-
-def mock_reg(hkcu=None, hklm=None):
- """
- Return a mock for distutils.msvc9compiler.Reg, patched
- to mock out the functions that access the registry.
- """
-
- _winreg = getattr(distutils.msvc9compiler, '_winreg', None)
- winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
-
- hives = {
- winreg.HKEY_CURRENT_USER: hkcu or {},
- winreg.HKEY_LOCAL_MACHINE: hklm or {},
- }
-
- @classmethod
- def read_keys(cls, base, key):
- """Return list of registry keys."""
- hive = hives.get(base, {})
- return [
- k.rpartition('\\')[2]
- for k in hive if k.startswith(key.lower())
- ]
-
- @classmethod
- def read_values(cls, base, key):
- """Return dict of registry keys and values."""
- hive = hives.get(base, {})
- return dict(
- (k.rpartition('\\')[2], hive[k])
- for k in hive if k.startswith(key.lower())
- )
-
- return mock.patch.multiple(distutils.msvc9compiler.Reg,
- read_keys=read_keys, read_values=read_values)
-
-
-class TestModulePatch:
- """
- Ensure that importing setuptools is sufficient to replace
- the standard find_vcvarsall function with a version that
- recognizes the "Visual C++ for Python" package.
- """
-
- key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
- key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
-
- def test_patched(self):
- "Test the module is actually patched"
- mod_name = distutils.msvc9compiler.find_vcvarsall.__module__
- assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched"
-
- def test_no_registry_entryies_means_nothing_found(self):
- """
- No registry entries or environment variable should lead to an error
- directing the user to download vcpython27.
- """
- find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
- query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
-
- with contexts.environment(VS90COMNTOOLS=None):
- with mock_reg():
- assert find_vcvarsall(9.0) is None
-
- expected = distutils.errors.DistutilsPlatformError
- with pytest.raises(expected) as exc:
- query_vcvarsall(9.0)
- assert 'aka.ms/vcpython27' in str(exc)
-
- @pytest.yield_fixture
- def user_preferred_setting(self):
- """
- Set up environment with different install dirs for user vs. system
- and yield the user_install_dir for the expected result.
- """
- with self.mock_install_dir() as user_install_dir:
- with self.mock_install_dir() as system_install_dir:
- reg = mock_reg(
- hkcu={
- self.key_32: user_install_dir,
- },
- hklm={
- self.key_32: system_install_dir,
- self.key_64: system_install_dir,
- },
- )
- with reg:
- yield user_install_dir
-
- def test_prefer_current_user(self, user_preferred_setting):
- """
- Ensure user's settings are preferred.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(user_preferred_setting, 'vcvarsall.bat')
- assert expected == result
-
- @pytest.yield_fixture
- def local_machine_setting(self):
- """
- Set up environment with only the system environment configured.
- """
- with self.mock_install_dir() as system_install_dir:
- reg = mock_reg(
- hklm={
- self.key_32: system_install_dir,
- },
- )
- with reg:
- yield system_install_dir
-
- def test_local_machine_recognized(self, local_machine_setting):
- """
- Ensure machine setting is honored if user settings are not present.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(local_machine_setting, 'vcvarsall.bat')
- assert expected == result
-
- @pytest.yield_fixture
- def x64_preferred_setting(self):
- """
- Set up environment with 64-bit and 32-bit system settings configured
- and yield the canonical location.
- """
- with self.mock_install_dir() as x32_dir:
- with self.mock_install_dir() as x64_dir:
- reg = mock_reg(
- hklm={
- # This *should* only exist on 32-bit machines
- self.key_32: x32_dir,
- # This *should* only exist on 64-bit machines
- self.key_64: x64_dir,
- },
- )
- with reg:
- yield x32_dir
-
- def test_ensure_64_bit_preferred(self, x64_preferred_setting):
- """
- Ensure 64-bit system key is preferred.
- """
- result = distutils.msvc9compiler.find_vcvarsall(9.0)
- expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat')
- assert expected == result
-
- @staticmethod
- @contextlib.contextmanager
- def mock_install_dir():
- """
- Make a mock install dir in a unique location so that tests can
- distinguish which dir was detected in a given scenario.
- """
- with contexts.tempdir() as result:
- vcvarsall = os.path.join(result, 'vcvarsall.bat')
- with open(vcvarsall, 'w'):
- pass
- yield result
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
deleted file mode 100644
index 6a76b5fc..00000000
--- a/setuptools/tests/test_packageindex.py
+++ /dev/null
@@ -1,225 +0,0 @@
-from __future__ import absolute_import
-
-import sys
-import os
-import distutils.errors
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import urllib, http_client
-
-from .textwrap import DALS
-import pkg_resources
-import setuptools.package_index
-from setuptools.tests.server import IndexServer
-
-
-class TestPackageIndex:
-
- def test_bad_url_bad_port(self):
- index = setuptools.package_index.PackageIndex()
- url = 'http://127.0.0.1:0/nonesuch/test_package_index'
- try:
- v = index.open_url(url)
- except Exception as v:
- assert url in str(v)
- else:
- assert isinstance(v, urllib.error.HTTPError)
-
- def test_bad_url_typo(self):
- # issue 16
- # easy_install inquant.contentmirror.plone breaks because of a typo
- # in its home URL
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
-
- url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
- try:
- v = index.open_url(url)
- except Exception as v:
- assert url in str(v)
- else:
- assert isinstance(v, urllib.error.HTTPError)
-
- def test_bad_url_bad_status_line(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
-
- def _urlopen(*args):
- raise http_client.BadStatusLine('line')
-
- index.opener = _urlopen
- url = 'http://example.com'
- try:
- v = index.open_url(url)
- except Exception as v:
- assert 'line' in str(v)
- else:
- raise AssertionError('Should have raise here!')
-
- def test_bad_url_double_scheme(self):
- """
- A bad URL with a double scheme should raise a DistutilsError.
- """
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
-
- # issue 20
- url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
- try:
- index.open_url(url)
- except distutils.errors.DistutilsError as error:
- msg = six.text_type(error)
- assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
- return
- raise RuntimeError("Did not raise")
-
- def test_bad_url_screwy_href(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
-
- # issue #160
- if sys.version_info[0] == 2 and sys.version_info[1] == 7:
- # this should not fail
- url = 'http://example.com'
- page = ('<a href="http://www.famfamfam.com]('
- 'http://www.famfamfam.com/">')
- index.process_index(url, page)
-
- def test_url_ok(self):
- index = setuptools.package_index.PackageIndex(
- hosts=('www.example.com',)
- )
- url = 'file:///tmp/test_package_index'
- assert index.url_ok(url, True)
-
- def test_links_priority(self):
- """
- Download links from the pypi simple index should be used before
- external download links.
- https://bitbucket.org/tarek/distribute/issue/163
-
- Usecase :
- - someone uploads a package on pypi, a md5 is generated
- - someone manually copies this link (with the md5 in the url) onto an
- external page accessible from the package page.
- - someone reuploads the package (with a different md5)
- - while easy_installing, an MD5 error occurs because the external link
- is used
- -> Setuptools should use the link from pypi, not the external one.
- """
- if sys.platform.startswith('java'):
- # Skip this test on jython because binding to :0 fails
- return
-
- # start an index server
- server = IndexServer()
- server.start()
- index_url = server.base_url() + 'test_links_priority/simple/'
-
- # scan a test index
- pi = setuptools.package_index.PackageIndex(index_url)
- requirement = pkg_resources.Requirement.parse('foobar')
- pi.find_packages(requirement)
- server.stop()
-
- # the distribution has been found
- assert 'foobar' in pi
- # we have only one link, because links are compared without md5
- assert len(pi['foobar'])==1
- # the link should be from the index
- assert 'correct_md5' in pi['foobar'][0].location
-
- def test_parse_bdist_wininst(self):
- parse = setuptools.package_index.parse_bdist_wininst
-
- actual = parse('reportlab-2.5.win32-py2.4.exe')
- expected = 'reportlab-2.5', '2.4', 'win32'
- assert actual == expected
-
- actual = parse('reportlab-2.5.win32.exe')
- expected = 'reportlab-2.5', None, 'win32'
- assert actual == expected
-
- actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
- expected = 'reportlab-2.5', '2.7', 'win-amd64'
- assert actual == expected
-
- actual = parse('reportlab-2.5.win-amd64.exe')
- expected = 'reportlab-2.5', None, 'win-amd64'
- assert actual == expected
-
- def test__vcs_split_rev_from_url(self):
- """
- Test the basic usage of _vcs_split_rev_from_url
- """
- vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
- url, rev = vsrfu('https://example.com/bar@2995')
- assert url == 'https://example.com/bar'
- assert rev == '2995'
-
- def test_local_index(self, tmpdir):
- """
- local_open should be able to read an index from the file system.
- """
- index_file = tmpdir / 'index.html'
- with index_file.open('w') as f:
- f.write('<div>content</div>')
- url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/'
- res = setuptools.package_index.local_open(url)
- assert 'content' in res.read()
-
-
-class TestContentCheckers:
-
- def test_md5(self):
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
- checker.feed('You should probably not be using MD5'.encode('ascii'))
- assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
- assert checker.is_valid()
-
- def test_other_fragment(self):
- "Content checks should succeed silently if no hash is present"
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#something%20completely%20different')
- checker.feed('anything'.encode('ascii'))
- assert checker.is_valid()
-
- def test_blank_md5(self):
- "Content checks should succeed if a hash is empty"
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=')
- checker.feed('anything'.encode('ascii'))
- assert checker.is_valid()
-
- def test_get_hash_name_md5(self):
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
- assert checker.hash_name == 'md5'
-
- def test_report(self):
- checker = setuptools.package_index.HashChecker.from_url(
- 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
- rep = checker.report(lambda x: x, 'My message about %s')
- assert rep == 'My message about md5'
-
-
-class TestPyPIConfig:
- def test_percent_in_password(self, tmpdir, monkeypatch):
- monkeypatch.setitem(os.environ, 'HOME', str(tmpdir))
- pypirc = tmpdir / '.pypirc'
- with pypirc.open('w') as strm:
- strm.write(DALS("""
- [pypi]
- repository=https://pypi.python.org
- username=jaraco
- password=pity%
- """))
- cfg = setuptools.package_index.PyPIConfig()
- cred = cfg.creds_by_repository['https://pypi.python.org']
- assert cred.username == 'jaraco'
- assert cred.password == 'pity%'
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
deleted file mode 100644
index fefd46f7..00000000
--- a/setuptools/tests/test_sandbox.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""develop tests
-"""
-import os
-import types
-
-import pytest
-
-import pkg_resources
-import setuptools.sandbox
-from setuptools.sandbox import DirectorySandbox
-
-
-class TestSandbox:
-
- def test_devnull(self, tmpdir):
- sandbox = DirectorySandbox(str(tmpdir))
- sandbox.run(self._file_writer(os.devnull))
-
- @staticmethod
- def _file_writer(path):
- def do_write():
- with open(path, 'w') as f:
- f.write('xxx')
- return do_write
-
- def test_win32com(self, tmpdir):
- """
- win32com should not be prevented from caching COM interfaces
- in gen_py.
- """
- win32com = pytest.importorskip('win32com')
- gen_py = win32com.__gen_path__
- target = os.path.join(gen_py, 'test_write')
- sandbox = DirectorySandbox(str(tmpdir))
- try:
- # attempt to create gen_py file
- sandbox.run(self._file_writer(target))
- finally:
- if os.path.exists(target):
- os.remove(target)
-
- def test_setup_py_with_BOM(self):
- """
- It should be possible to execute a setup.py with a Byte Order Mark
- """
- target = pkg_resources.resource_filename(__name__,
- 'script-with-bom.py')
- namespace = types.ModuleType('namespace')
- setuptools.sandbox._execfile(target, vars(namespace))
- assert namespace.result == 'passed'
-
- def test_setup_py_with_CRLF(self, tmpdir):
- setup_py = tmpdir / 'setup.py'
- with setup_py.open('wb') as stream:
- stream.write(b'"degenerate script"\r\n')
- setuptools.sandbox._execfile(str(setup_py), globals())
-
-
-class TestExceptionSaver:
- def test_exception_trapped(self):
- with setuptools.sandbox.ExceptionSaver():
- raise ValueError("details")
-
- def test_exception_resumed(self):
- with setuptools.sandbox.ExceptionSaver() as saved_exc:
- raise ValueError("details")
-
- with pytest.raises(ValueError) as caught:
- saved_exc.resume()
-
- assert isinstance(caught.value, ValueError)
- assert str(caught.value) == 'details'
-
- def test_exception_reconstructed(self):
- orig_exc = ValueError("details")
-
- with setuptools.sandbox.ExceptionSaver() as saved_exc:
- raise orig_exc
-
- with pytest.raises(ValueError) as caught:
- saved_exc.resume()
-
- assert isinstance(caught.value, ValueError)
- assert caught.value is not orig_exc
-
- def test_no_exception_passes_quietly(self):
- with setuptools.sandbox.ExceptionSaver() as saved_exc:
- pass
-
- saved_exc.resume()
-
- def test_unpickleable_exception(self):
- class CantPickleThis(Exception):
- "This Exception is unpickleable because it's not in globals"
-
- with setuptools.sandbox.ExceptionSaver() as saved_exc:
- raise CantPickleThis('detail')
-
- with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
- saved_exc.resume()
-
- assert str(caught.value) == "CantPickleThis('detail',)"
-
- def test_unpickleable_exception_when_hiding_setuptools(self):
- """
- As revealed in #440, an infinite recursion can occur if an unpickleable
- exception while setuptools is hidden. Ensure this doesn't happen.
- """
- class ExceptionUnderTest(Exception):
- """
- An unpickleable exception (not in globals).
- """
-
- with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
- with setuptools.sandbox.save_modules():
- setuptools.sandbox.hide_setuptools()
- raise ExceptionUnderTest()
-
- msg, = caught.value.args
- assert msg == 'ExceptionUnderTest()'
-
- def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir):
- """
- When in a sandbox with setuptools hidden, a SandboxViolation
- should reflect a proper exception and not be wrapped in
- an UnpickleableException.
- """
- def write_file():
- "Trigger a SandboxViolation by writing outside the sandbox"
- with open('/etc/foo', 'w'):
- pass
- sandbox = DirectorySandbox(str(tmpdir))
- with pytest.raises(setuptools.sandbox.SandboxViolation) as caught:
- with setuptools.sandbox.save_modules():
- setuptools.sandbox.hide_setuptools()
- sandbox.run(write_file)
-
- cmd, args, kwargs = caught.value.args
- assert cmd == 'open'
- assert args == ('/etc/foo', 'w')
- assert kwargs == {}
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
deleted file mode 100644
index d2a1f1bb..00000000
--- a/setuptools/tests/test_sdist.py
+++ /dev/null
@@ -1,423 +0,0 @@
-# -*- coding: utf-8 -*-
-"""sdist tests"""
-
-import os
-import shutil
-import sys
-import tempfile
-import unicodedata
-import contextlib
-import io
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
-
-import pytest
-
-import pkg_resources
-from setuptools.command.sdist import sdist
-from setuptools.command.egg_info import manifest_maker
-from setuptools.dist import Distribution
-from setuptools.tests import fail_on_ascii
-
-
-py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
-
-
-SETUP_ATTRS = {
- 'name': 'sdist_test',
- 'version': '0.0',
- 'packages': ['sdist_test'],
- 'package_data': {'sdist_test': ['*.txt']}
-}
-
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(**%r)
-""" % SETUP_ATTRS
-
-
-if six.PY3:
- LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
-else:
- LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
-
-
-# Cannot use context manager because of Python 2.4
-@contextlib.contextmanager
-def quiet():
- old_stdout, old_stderr = sys.stdout, sys.stderr
- sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
- try:
- yield
- finally:
- sys.stdout, sys.stderr = old_stdout, old_stderr
-
-
-# Fake byte literals for Python <= 2.5
-def b(s, encoding='utf-8'):
- if six.PY3:
- return s.encode(encoding)
- return s
-
-
-# Convert to POSIX path
-def posix(path):
- if six.PY3 and not isinstance(path, str):
- return path.replace(os.sep.encode('ascii'), b('/'))
- else:
- return path.replace(os.sep, '/')
-
-
-# HFS Plus uses decomposed UTF-8
-def decompose(path):
- if isinstance(path, six.text_type):
- return unicodedata.normalize('NFD', path)
- try:
- path = path.decode('utf-8')
- path = unicodedata.normalize('NFD', path)
- path = path.encode('utf-8')
- except UnicodeError:
- pass # Not UTF-8
- return path
-
-
-def read_all_bytes(filename):
- with io.open(filename, 'rb') as fp:
- return fp.read()
-
-
-class TestSdistTest:
-
- def setup_method(self, method):
- self.temp_dir = tempfile.mkdtemp()
- f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
- f.write(SETUP_PY)
- f.close()
-
- # Set up the rest of the test package
- test_pkg = os.path.join(self.temp_dir, 'sdist_test')
- os.mkdir(test_pkg)
- # *.rst was not included in package_data, so c.rst should not be
- # automatically added to the manifest when not under version control
- for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
- # Just touch the files; their contents are irrelevant
- open(os.path.join(test_pkg, fname), 'w').close()
-
- self.old_cwd = os.getcwd()
- os.chdir(self.temp_dir)
-
- def teardown_method(self, method):
- os.chdir(self.old_cwd)
- shutil.rmtree(self.temp_dir)
-
- def test_package_data_in_sdist(self):
- """Regression test for pull request #4: ensures that files listed in
- package_data are included in the manifest even if they're not added to
- version control.
- """
-
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- with quiet():
- cmd.run()
-
- manifest = cmd.filelist.files
- assert os.path.join('sdist_test', 'a.txt') in manifest
- assert os.path.join('sdist_test', 'b.txt') in manifest
- assert os.path.join('sdist_test', 'c.rst') not in manifest
-
-
- def test_defaults_case_sensitivity(self):
- """
- Make sure default files (README.*, etc.) are added in a case-sensitive
- way to avoid problems with packages built on Windows.
- """
-
- open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
- open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
-
- dist = Distribution(SETUP_ATTRS)
- # the extension deliberately capitalized for this test
- # to make sure the actual filename (not capitalized) gets added
- # to the manifest
- dist.script_name = 'setup.PY'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- with quiet():
- cmd.run()
-
- # lowercase all names so we can test in a case-insensitive way to make sure the files are not included
- manifest = map(lambda x: x.lower(), cmd.filelist.files)
- assert 'readme.rst' not in manifest, manifest
- assert 'setup.py' not in manifest, manifest
- assert 'setup.cfg' not in manifest, manifest
-
- @fail_on_ascii
- def test_manifest_is_written_with_utf8_encoding(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- mm = manifest_maker(dist)
- mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
- os.mkdir('sdist_test.egg-info')
-
- # UTF-8 filename
- filename = os.path.join('sdist_test', 'smörbröd.py')
-
- # Must create the file or it will get stripped.
- open(filename, 'w').close()
-
- # Add UTF-8 filename and write manifest
- with quiet():
- mm.run()
- mm.filelist.append(filename)
- mm.write_manifest()
-
- contents = read_all_bytes(mm.manifest)
-
- # The manifest should be UTF-8 encoded
- u_contents = contents.decode('UTF-8')
-
- # The manifest should contain the UTF-8 filename
- if six.PY2:
- fs_enc = sys.getfilesystemencoding()
- filename = filename.decode(fs_enc)
-
- assert posix(filename) in u_contents
-
- @py3_only
- @fail_on_ascii
- def test_write_manifest_allows_utf8_filenames(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- mm = manifest_maker(dist)
- mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
- os.mkdir('sdist_test.egg-info')
-
- # UTF-8 filename
- filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
-
- # Must touch the file or risk removal
- open(filename, "w").close()
-
- # Add filename and write manifest
- with quiet():
- mm.run()
- u_filename = filename.decode('utf-8')
- mm.filelist.files.append(u_filename)
- # Re-write manifest
- mm.write_manifest()
-
- contents = read_all_bytes(mm.manifest)
-
- # The manifest should be UTF-8 encoded
- contents.decode('UTF-8')
-
- # The manifest should contain the UTF-8 filename
- assert posix(filename) in contents
-
- # The filelist should have been updated as well
- assert u_filename in mm.filelist.files
-
- @py3_only
- def test_write_manifest_skips_non_utf8_filenames(self):
- """
- Files that cannot be encoded to UTF-8 (specifically, those that
- weren't originally successfully decoded and have surrogate
- escapes) should be omitted from the manifest.
- See https://bitbucket.org/tarek/distribute/issue/303 for history.
- """
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- mm = manifest_maker(dist)
- mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
- os.mkdir('sdist_test.egg-info')
-
- # Latin-1 filename
- filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
-
- # Add filename with surrogates and write manifest
- with quiet():
- mm.run()
- u_filename = filename.decode('utf-8', 'surrogateescape')
- mm.filelist.append(u_filename)
- # Re-write manifest
- mm.write_manifest()
-
- contents = read_all_bytes(mm.manifest)
-
- # The manifest should be UTF-8 encoded
- contents.decode('UTF-8')
-
- # The Latin-1 filename should have been skipped
- assert posix(filename) not in contents
-
- # The filelist should have been updated as well
- assert u_filename not in mm.filelist.files
-
- @fail_on_ascii
- def test_manifest_is_read_with_utf8_encoding(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- # Create manifest
- with quiet():
- cmd.run()
-
- # Add UTF-8 filename to manifest
- filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
- cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
- manifest = open(cmd.manifest, 'ab')
- manifest.write(b('\n') + filename)
- manifest.close()
-
- # The file must exist to be included in the filelist
- open(filename, 'w').close()
-
- # Re-read manifest
- cmd.filelist.files = []
- with quiet():
- cmd.read_manifest()
-
- # The filelist should contain the UTF-8 filename
- if six.PY3:
- filename = filename.decode('utf-8')
- assert filename in cmd.filelist.files
-
- @py3_only
- def test_read_manifest_skips_non_utf8_filenames(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- # Create manifest
- with quiet():
- cmd.run()
-
- # Add Latin-1 filename to manifest
- filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
- cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
- manifest = open(cmd.manifest, 'ab')
- manifest.write(b('\n') + filename)
- manifest.close()
-
- # The file must exist to be included in the filelist
- open(filename, 'w').close()
-
- # Re-read manifest
- cmd.filelist.files = []
- with quiet():
- cmd.read_manifest()
-
- # The Latin-1 filename should have been skipped
- filename = filename.decode('latin-1')
- assert filename not in cmd.filelist.files
-
- @fail_on_ascii
- def test_sdist_with_utf8_encoded_filename(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- # UTF-8 filename
- filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
- open(filename, 'w').close()
-
- with quiet():
- cmd.run()
-
- if sys.platform == 'darwin':
- filename = decompose(filename)
-
- if six.PY3:
- fs_enc = sys.getfilesystemencoding()
-
- if sys.platform == 'win32':
- if fs_enc == 'cp1252':
- # Python 3 mangles the UTF-8 filename
- filename = filename.decode('cp1252')
- assert filename in cmd.filelist.files
- else:
- filename = filename.decode('mbcs')
- assert filename in cmd.filelist.files
- else:
- filename = filename.decode('utf-8')
- assert filename in cmd.filelist.files
- else:
- assert filename in cmd.filelist.files
-
- def test_sdist_with_latin1_encoded_filename(self):
- # Test for #303.
- dist = Distribution(SETUP_ATTRS)
- dist.script_name = 'setup.py'
- cmd = sdist(dist)
- cmd.ensure_finalized()
-
- # Latin-1 filename
- filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
- open(filename, 'w').close()
- assert os.path.isfile(filename)
-
- with quiet():
- cmd.run()
-
- if six.PY3:
- # not all windows systems have a default FS encoding of cp1252
- if sys.platform == 'win32':
- # Latin-1 is similar to Windows-1252 however
- # on mbcs filesys it is not in latin-1 encoding
- fs_enc = sys.getfilesystemencoding()
- if fs_enc == 'mbcs':
- filename = filename.decode('mbcs')
- else:
- filename = filename.decode('latin-1')
-
- assert filename in cmd.filelist.files
- else:
- # The Latin-1 filename should have been skipped
- filename = filename.decode('latin-1')
- filename not in cmd.filelist.files
- else:
- # Under Python 2 there seems to be no decoded string in the
- # filelist. However, due to decode and encoding of the
- # file name to get utf-8 Manifest the latin1 maybe excluded
- try:
- # fs_enc should match how one is expect the decoding to
- # be proformed for the manifest output.
- fs_enc = sys.getfilesystemencoding()
- filename.decode(fs_enc)
- assert filename in cmd.filelist.files
- except UnicodeDecodeError:
- filename not in cmd.filelist.files
-
-
-def test_default_revctrl():
- """
- When _default_revctrl was removed from the `setuptools.command.sdist`
- module in 10.0, it broke some systems which keep an old install of
- setuptools (Distribute) around. Those old versions require that the
- setuptools package continue to implement that interface, so this
- function provides that interface, stubbed. See #320 for details.
-
- This interface must be maintained until Ubuntu 12.04 is no longer
- supported (by Setuptools).
- """
- ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
- ep = pkg_resources.EntryPoint.parse(ep_def)
- res = ep.resolve()
- assert hasattr(res, '__iter__')
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
deleted file mode 100644
index e59800d2..00000000
--- a/setuptools/tests/test_setuptools.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-
-import pytest
-
-import setuptools
-
-
-@pytest.fixture
-def example_source(tmpdir):
- tmpdir.mkdir('foo')
- (tmpdir / 'foo/bar.py').write('')
- (tmpdir / 'readme.txt').write('')
- return tmpdir
-
-
-def test_findall(example_source):
- found = list(setuptools.findall(str(example_source)))
- expected = ['readme.txt', 'foo/bar.py']
- expected = [example_source.join(fn) for fn in expected]
- assert found == expected
-
-
-def test_findall_curdir(example_source):
- with example_source.as_cwd():
- found = list(setuptools.findall())
- expected = ['readme.txt', os.path.join('foo', 'bar.py')]
- assert found == expected
-
-
-@pytest.fixture
-def can_symlink(tmpdir):
- """
- Skip if cannot create a symbolic link
- """
- link_fn = 'link'
- target_fn = 'target'
- try:
- os.symlink(target_fn, link_fn)
- except (OSError, NotImplementedError, AttributeError):
- pytest.skip("Cannot create symbolic links")
- os.remove(link_fn)
-
-
-def test_findall_missing_symlink(tmpdir, can_symlink):
- with tmpdir.as_cwd():
- os.symlink('foo', 'bar')
- found = list(setuptools.findall())
- assert found == []
diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py
deleted file mode 100644
index 4155a5b1..00000000
--- a/setuptools/tests/test_test.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# -*- coding: UTF-8 -*-
-
-from __future__ import unicode_literals
-
-import os
-import site
-from distutils.errors import DistutilsError
-
-import pytest
-
-from setuptools.command.test import test
-from setuptools.dist import Distribution
-
-from .textwrap import DALS
-from . import contexts
-
-SETUP_PY = DALS("""
- from setuptools import setup
-
- setup(name='foo',
- packages=['name', 'name.space', 'name.space.tests'],
- namespace_packages=['name'],
- test_suite='name.space.tests.test_suite',
- )
- """)
-
-NS_INIT = DALS("""
- # -*- coding: Latin-1 -*-
- # Söme Arbiträry Ünicode to test Distribute Issüé 310
- try:
- __import__('pkg_resources').declare_namespace(__name__)
- except ImportError:
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
- """)
-
-TEST_PY = DALS("""
- import unittest
-
- class TestTest(unittest.TestCase):
- def test_test(self):
- print "Foo" # Should fail under Python 3 unless 2to3 is used
-
- test_suite = unittest.makeSuite(TestTest)
- """)
-
-
-@pytest.fixture
-def sample_test(tmpdir_cwd):
- os.makedirs('name/space/tests')
-
- # setup.py
- with open('setup.py', 'wt') as f:
- f.write(SETUP_PY)
-
- # name/__init__.py
- with open('name/__init__.py', 'wb') as f:
- f.write(NS_INIT.encode('Latin-1'))
-
- # name/space/__init__.py
- with open('name/space/__init__.py', 'wt') as f:
- f.write('#empty\n')
-
- # name/space/tests/__init__.py
- with open('name/space/tests/__init__.py', 'wt') as f:
- f.write(TEST_PY)
-
-
-@pytest.mark.skipif('hasattr(sys, "real_prefix")')
-@pytest.mark.usefixtures('user_override')
-@pytest.mark.usefixtures('sample_test')
-class TestTestTest:
-
- def test_test(self):
- params = dict(
- name='foo',
- packages=['name', 'name.space', 'name.space.tests'],
- namespace_packages=['name'],
- test_suite='name.space.tests.test_suite',
- use_2to3=True,
- )
- dist = Distribution(params)
- dist.script_name = 'setup.py'
- cmd = test(dist)
- cmd.user = 1
- cmd.ensure_finalized()
- cmd.install_dir = site.USER_SITE
- cmd.user = 1
- with contexts.quiet():
- # The test runner calls sys.exit
- with contexts.suppress_exceptions(SystemExit):
- cmd.run()
diff --git a/setuptools/tests/test_unicode_utils.py b/setuptools/tests/test_unicode_utils.py
deleted file mode 100644
index a24a9bd5..00000000
--- a/setuptools/tests/test_unicode_utils.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from setuptools import unicode_utils
-
-
-def test_filesys_decode_fs_encoding_is_None(monkeypatch):
- """
- Test filesys_decode does not raise TypeError when
- getfilesystemencoding returns None.
- """
- monkeypatch.setattr('sys.getfilesystemencoding', lambda: None)
- unicode_utils.filesys_decode(b'test')
diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py
deleted file mode 100644
index cc71cadb..00000000
--- a/setuptools/tests/test_upload_docs.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import os
-import zipfile
-import contextlib
-
-import pytest
-
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-from .textwrap import DALS
-from . import contexts
-
-
-SETUP_PY = DALS(
- """
- from setuptools import setup
-
- setup(name='foo')
- """)
-
-
-@pytest.fixture
-def sample_project(tmpdir_cwd):
- # setup.py
- with open('setup.py', 'wt') as f:
- f.write(SETUP_PY)
-
- os.mkdir('build')
-
- # A test document.
- with open('build/index.html', 'w') as f:
- f.write("Hello world.")
-
- # An empty folder.
- os.mkdir('build/empty')
-
-
-@pytest.mark.usefixtures('sample_project')
-@pytest.mark.usefixtures('user_override')
-class TestUploadDocsTest:
-
- def test_create_zipfile(self):
- """
- Ensure zipfile creation handles common cases, including a folder
- containing an empty folder.
- """
-
- dist = Distribution()
-
- cmd = upload_docs(dist)
- cmd.target_dir = cmd.upload_dir = 'build'
- with contexts.tempdir() as tmp_dir:
- tmp_file = os.path.join(tmp_dir, 'foo.zip')
- zip_file = cmd.create_zipfile(tmp_file)
-
- assert zipfile.is_zipfile(tmp_file)
-
- with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
- assert zip_file.namelist() == ['index.html']
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
deleted file mode 100644
index 5b14d07b..00000000
--- a/setuptools/tests/test_windows_wrappers.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""
-Python Script Wrapper for Windows
-=================================
-
-setuptools includes wrappers for Python scripts that allows them to be
-executed like regular windows programs. There are 2 wrappers, one
-for command-line programs, cli.exe, and one for graphical programs,
-gui.exe. These programs are almost identical, function pretty much
-the same way, and are generated from the same source file. The
-wrapper programs are used by copying them to the directory containing
-the script they are to wrap and with the same name as the script they
-are to wrap.
-"""
-
-from __future__ import absolute_import
-
-import sys
-import textwrap
-import subprocess
-
-import pytest
-
-from setuptools.command.easy_install import nt_quote_arg
-import pkg_resources
-
-
-pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
-
-
-class WrapperTester:
-
- @classmethod
- def prep_script(cls, template):
- python_exe = nt_quote_arg(sys.executable)
- return template % locals()
-
- @classmethod
- def create_script(cls, tmpdir):
- """
- Create a simple script, foo-script.py
-
- Note that the script starts with a Unix-style '#!' line saying which
- Python executable to run. The wrapper will use this line to find the
- correct Python executable.
- """
-
- script = cls.prep_script(cls.script_tmpl)
-
- with (tmpdir / cls.script_name).open('w') as f:
- f.write(script)
-
- # also copy cli.exe to the sample directory
- with (tmpdir / cls.wrapper_name).open('wb') as f:
- w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
- f.write(w)
-
-
-class TestCLI(WrapperTester):
- script_name = 'foo-script.py'
- wrapper_source = 'cli-32.exe'
- wrapper_name = 'foo.exe'
- script_tmpl = textwrap.dedent("""
- #!%(python_exe)s
- import sys
- input = repr(sys.stdin.read())
- print(sys.argv[0][-14:])
- print(sys.argv[1:])
- print(input)
- if __debug__:
- print('non-optimized')
- """).lstrip()
-
- def test_basic(self, tmpdir):
- """
- When the copy of cli.exe, foo.exe in this example, runs, it examines
- the path name it was run with and computes a Python script path name
- by removing the '.exe' suffix and adding the '-script.py' suffix. (For
- GUI programs, the suffix '-script.pyw' is added.) This is why we
- named out script the way we did. Now we can run out script by running
- the wrapper:
-
- This example was a little pathological in that it exercised windows
- (MS C runtime) quoting rules:
-
- - Strings containing spaces are surrounded by double quotes.
-
- - Double quotes in strings need to be escaped by preceding them with
- back slashes.
-
- - One or more backslashes preceding double quotes need to be escaped
- by preceding each of them with back slashes.
- """
- self.create_script(tmpdir)
- cmd = [
- str(tmpdir / 'foo.exe'),
- 'arg1',
- 'arg 2',
- 'arg "2\\"',
- 'arg 4\\',
- 'arg5 a\\\\b',
- ]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
- stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
- actual = stdout.decode('ascii').replace('\r\n', '\n')
- expected = textwrap.dedent(r"""
- \foo-script.py
- ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
- 'hello\nworld\n'
- non-optimized
- """).lstrip()
- assert actual == expected
-
- def test_with_options(self, tmpdir):
- """
- Specifying Python Command-line Options
- --------------------------------------
-
- You can specify a single argument on the '#!' line. This can be used
- to specify Python options like -O, to run in optimized mode or -i
- to start the interactive interpreter. You can combine multiple
- options as usual. For example, to run in optimized mode and
- enter the interpreter after running the script, you could use -Oi:
- """
- self.create_script(tmpdir)
- tmpl = textwrap.dedent("""
- #!%(python_exe)s -Oi
- import sys
- input = repr(sys.stdin.read())
- print(sys.argv[0][-14:])
- print(sys.argv[1:])
- print(input)
- if __debug__:
- print('non-optimized')
- sys.ps1 = '---'
- """).lstrip()
- with (tmpdir / 'foo-script.py').open('w') as f:
- f.write(self.prep_script(tmpl))
- cmd = [str(tmpdir / 'foo.exe')]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
- stdout, stderr = proc.communicate()
- actual = stdout.decode('ascii').replace('\r\n', '\n')
- expected = textwrap.dedent(r"""
- \foo-script.py
- []
- ''
- ---
- """).lstrip()
- assert actual == expected
-
-
-class TestGUI(WrapperTester):
- """
- Testing the GUI Version
- -----------------------
- """
- script_name = 'bar-script.pyw'
- wrapper_source = 'gui-32.exe'
- wrapper_name = 'bar.exe'
-
- script_tmpl = textwrap.dedent("""
- #!%(python_exe)s
- import sys
- f = open(sys.argv[1], 'wb')
- bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
- f.close()
- """).strip()
-
- def test_basic(self, tmpdir):
- """Test the GUI version with the simple scipt, bar-script.py"""
- self.create_script(tmpdir)
-
- cmd = [
- str(tmpdir / 'bar.exe'),
- str(tmpdir / 'test_output.txt'),
- 'Test Argument',
- ]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
- stdout, stderr = proc.communicate()
- assert not stdout
- assert not stderr
- with (tmpdir / 'test_output.txt').open('rb') as f_out:
- actual = f_out.read().decode('ascii')
- assert actual == repr('Test Argument')
diff --git a/setuptools/tests/textwrap.py b/setuptools/tests/textwrap.py
deleted file mode 100644
index 5cd9e5bc..00000000
--- a/setuptools/tests/textwrap.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-
-import textwrap
-
-
-def DALS(s):
- "dedent and left-strip"
- return textwrap.dedent(s).lstrip()
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
deleted file mode 100644
index ffab3e24..00000000
--- a/setuptools/unicode_utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import unicodedata
-import sys
-
-from setuptools.extern import six
-
-# HFS Plus uses decomposed UTF-8
-def decompose(path):
- if isinstance(path, six.text_type):
- return unicodedata.normalize('NFD', path)
- try:
- path = path.decode('utf-8')
- path = unicodedata.normalize('NFD', path)
- path = path.encode('utf-8')
- except UnicodeError:
- pass # Not UTF-8
- return path
-
-
-def filesys_decode(path):
- """
- Ensure that the given path is decoded,
- NONE when no expected encoding works
- """
-
- if isinstance(path, six.text_type):
- return path
-
- fs_enc = sys.getfilesystemencoding() or 'utf-8'
- candidates = fs_enc, 'utf-8'
-
- for enc in candidates:
- try:
- return path.decode(enc)
- except UnicodeDecodeError:
- continue
-
-
-def try_encode(string, enc):
- "turn unicode encoding into a functional routine"
- try:
- return string.encode(enc)
- except UnicodeEncodeError:
- return None
diff --git a/setuptools/utils.py b/setuptools/utils.py
deleted file mode 100644
index 91e4b87f..00000000
--- a/setuptools/utils.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-import os.path
-
-
-def cs_path_exists(fspath):
- if not os.path.exists(fspath):
- return False
- # make absolute so we always have a directory
- abspath = os.path.abspath(fspath)
- directory, filename = os.path.split(abspath)
- return filename in os.listdir(directory) \ No newline at end of file
diff --git a/setuptools/version.py b/setuptools/version.py
deleted file mode 100644
index 049e7feb..00000000
--- a/setuptools/version.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import pkg_resources
-
-try:
- __version__ = pkg_resources.require('setuptools')[0].version
-except Exception:
- __version__ = 'unknown'
diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py
deleted file mode 100644
index cb977cff..00000000
--- a/setuptools/windows_support.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import platform
-import ctypes
-
-
-def windows_only(func):
- if platform.system() != 'Windows':
- return lambda *args, **kwargs: None
- return func
-
-
-@windows_only
-def hide_file(path):
- """
- Set the hidden attribute on a file or directory.
-
- From http://stackoverflow.com/questions/19622133/
-
- `path` must be text.
- """
- __import__('ctypes.wintypes')
- SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
- SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
- SetFileAttributes.restype = ctypes.wintypes.BOOL
-
- FILE_ATTRIBUTE_HIDDEN = 0x02
-
- ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
- if not ret:
- raise ctypes.WinError()
diff --git a/tests/manual_test.py b/tests/manual_test.py
deleted file mode 100644
index 808fa55a..00000000
--- a/tests/manual_test.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-import shutil
-import tempfile
-import subprocess
-from distutils.command.install import INSTALL_SCHEMES
-from string import Template
-
-from six.moves import urllib
-
-def _system_call(*args):
- assert subprocess.call(args) == 0
-
-def tempdir(func):
- def _tempdir(*args, **kwargs):
- test_dir = tempfile.mkdtemp()
- old_dir = os.getcwd()
- os.chdir(test_dir)
- try:
- return func(*args, **kwargs)
- finally:
- os.chdir(old_dir)
- shutil.rmtree(test_dir)
- return _tempdir
-
-SIMPLE_BUILDOUT = """\
-[buildout]
-
-parts = eggs
-
-[eggs]
-recipe = zc.recipe.egg
-
-eggs =
- extensions
-"""
-
-BOOTSTRAP = 'http://downloads.buildout.org/1/bootstrap.py'
-PYVER = sys.version.split()[0][:3]
-
-_VARS = {'base': '.',
- 'py_version_short': PYVER}
-
-scheme = 'nt' if sys.platform == 'win32' else 'unix_prefix'
-PURELIB = INSTALL_SCHEMES[scheme]['purelib']
-
-
-@tempdir
-def test_virtualenv():
- """virtualenv with setuptools"""
- purelib = os.path.abspath(Template(PURELIB).substitute(**_VARS))
- _system_call('virtualenv', '--no-site-packages', '.')
- _system_call('bin/easy_install', 'setuptools==dev')
- # linux specific
- site_pkg = os.listdir(purelib)
- site_pkg.sort()
- assert 'setuptools' in site_pkg[0]
- easy_install = os.path.join(purelib, 'easy-install.pth')
- with open(easy_install) as f:
- res = f.read()
- assert 'setuptools' in res
-
-@tempdir
-def test_full():
- """virtualenv + pip + buildout"""
- _system_call('virtualenv', '--no-site-packages', '.')
- _system_call('bin/easy_install', '-q', 'setuptools==dev')
- _system_call('bin/easy_install', '-qU', 'setuptools==dev')
- _system_call('bin/easy_install', '-q', 'pip')
- _system_call('bin/pip', 'install', '-q', 'zc.buildout')
-
- with open('buildout.cfg', 'w') as f:
- f.write(SIMPLE_BUILDOUT)
-
- with open('bootstrap.py', 'w') as f:
- f.write(urllib.request.urlopen(BOOTSTRAP).read())
-
- _system_call('bin/python', 'bootstrap.py')
- _system_call('bin/buildout', '-q')
- eggs = os.listdir('eggs')
- eggs.sort()
- assert len(eggs) == 3
- assert eggs[1].startswith('setuptools')
- del eggs[1]
- assert eggs == ['extensions-0.3-py2.6.egg',
- 'zc.recipe.egg-1.2.2-py2.6.egg']
-
-if __name__ == '__main__':
- test_virtualenv()
- test_full()
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 9061869f..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,5 +0,0 @@
-[tox]
-envlist = py26,py27,py31,py32,py33,py34
-
-[testenv]
-commands=python setup.py test