496 files changed, 35319 insertions, 3720 deletions
diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a9d31cd2..1125d38d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 59.2.0 +current_version = 62.1.0 commit = True tag = True diff --git a/.codecov.yml b/.codecov.yml index 7510dfc6..bb829c41 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -2,4 +2,11 @@ comment: false coverage: status: project: - threshold: 0.5% + default: + informational: true # Treat coverage info as informational only + threshold: 0.5% + patch: + default: + informational: true # Treat coverage info as informational only +github_checks: + annotations: false # Codecov may pollute the "files" diff view diff --git a/.coveragerc b/.coveragerc index 6a34e662..3153808d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,6 +2,7 @@ omit = # leading `*/` for pytest-dev/pytest-cov#456 */.tox/* + */_validate_pyproject/* # generated code, tested in `validate-pyproject` [report] show_missing = True @@ -8,6 +8,7 @@ extend-exclude = build setuptools/_vendor setuptools/_distutils + setuptools/config/_validate_pyproject/fastjsonschema_* pkg_resources/_vendor extend-ignore = diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 73911ec8..672acd18 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -115,15 +115,4 @@ body: validations: required: true - -- type: checkboxes - attributes: - label: Code of Conduct - description: | - Read the [PSF Code of Conduct][CoC] first. - - [CoC]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - options: - - label: I agree to follow the PSF Code of Conduct - required: true ... diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index dde102ca..ebc2d339 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,3 @@ -# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser -blank_issues_enabled: false # default: true contact_links: - name: 🤔 Have questions or need support? url: https://github.com/pypa/setuptools/discussions @@ -9,7 +7,6 @@ contact_links: about: | Please ask typical Q&A here: general ideas for Python packaging, questions about structuring projects and so on -- name: >- - 💬 IRC: #pypa @ Freenode - url: https://webchat.freenode.net/#pypa +- name: 💬 Discord (chat) + url: https://discord.com/invite/pypa about: Chat with devs diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml new file mode 100644 index 00000000..425681d7 --- /dev/null +++ b/.github/workflows/ci-sage.yml @@ -0,0 +1,152 @@ +name: Run Sage CI for Linux + +## This GitHub Actions workflow provides: +## +## - portability testing, by building and testing this project on many platforms +## +## - continuous integration, by building and testing other software +## that depends on this project. +## +## It runs on every push of a tag to the GitHub repository. +## +## The testing can be monitored in the "Actions" tab of the GitHub repository. +## +## After all jobs have finished (or are canceled) and a short delay, +## tar files of all logs are made available as "build artifacts". +## +## This GitHub Actions workflow uses the portability testing framework +## of SageMath (https://www.sagemath.org/). 
For more information, see +## https://doc.sagemath.org/html/en/developer/portability_testing.html + +## The workflow consists of two jobs: +## +## - First, it builds a source distribution of the project +## and generates a script "update-pkgs.sh". It uploads them +## as a build artifact named upstream. +## +## - Second, it checks out a copy of the SageMath source tree. +## It downloads the upstream artifact and replaces the project's +## package in the SageMath distribution by the newly packaged one +## from the upstream artifact, by running the script "update-pkgs.sh". +## Then it builds a small portion of the Sage distribution. +## +## Many copies of the second step are run in parallel for each of the tested +## systems/configurations. + +on: + push: + tags: + - '*' + workflow_dispatch: + # Allow to run manually + +env: + # Ubuntu packages to install so that the project's "setup.py sdist" can succeed + DIST_PREREQ: python3 + # Name of this project in the Sage distribution + SPKG: setuptools + # Sage distribution packages to build + TARGETS_PRE: build/make/Makefile + TARGETS: setuptools pyzmq + TARGETS_OPTIONAL: build/make/Makefile + # Standard setting: Test the current beta release of Sage: + SAGE_REPO: sagemath/sage + SAGE_REF: develop + # Test with the branch from https://trac.sagemath.org/ticket/33288 + # This may provide hotfixes for the CI that have not been merged into + # the sage develop branch yet. + SAGE_TRAC_GIT: https://github.com/sagemath/sagetrac-mirror.git + SAGE_TICKET: 33288 + REMOVE_PATCHES: "*" + +jobs: + + dist: + runs-on: ubuntu-latest + steps: + - name: Check out ${{ env.SPKG }} + uses: actions/checkout@v2 + with: + path: build/pkgs/${{ env.SPKG }}/src + - name: Install prerequisites + run: | + sudo DEBIAN_FRONTEND=noninteractive apt-get update + sudo DEBIAN_FRONTEND=noninteractive apt-get install $DIST_PREREQ + python3 -m pip install build + - name: Run make dist, prepare upstream artifact + run: | + (cd build/pkgs/${{ env.SPKG }}/src && python3 -m build --sdist) \ + && mkdir -p upstream && cp build/pkgs/${{ env.SPKG }}/src/dist/*.tar.gz upstream/${{ env.SPKG }}-git.tar.gz \ + && echo "sage-package create ${{ env.SPKG }} --version git --tarball ${{ env.SPKG }}-git.tar.gz --type=standard" > upstream/update-pkgs.sh \ + && if [ -n "${{ env.REMOVE_PATCHES }}" ]; then echo "(cd ../build/pkgs/${{ env.SPKG }}/patches && rm -f ${{ env.REMOVE_PATCHES }}; :)" >> upstream/update-pkgs.sh; fi \ + && ls -l upstream/ + - uses: actions/upload-artifact@v2 + with: + path: upstream + name: upstream + + docker: + runs-on: ubuntu-latest + needs: [dist] + strategy: + fail-fast: false + max-parallel: 32 + matrix: + tox_system_factor: [ubuntu-trusty, ubuntu-xenial, ubuntu-bionic, ubuntu-focal, ubuntu-hirsute, ubuntu-impish, ubuntu-jammy, debian-stretch, debian-buster, debian-bullseye, debian-bookworm, debian-sid, linuxmint-17, linuxmint-18, linuxmint-19, linuxmint-19.3, linuxmint-20.1, linuxmint-20.2, linuxmint-20.3, fedora-26, fedora-27, fedora-28, fedora-29, fedora-30, fedora-31, fedora-32, fedora-33, fedora-34, fedora-35, centos-7, centos-stream-8, centos-stream-9, gentoo-python3.9, archlinux-latest, opensuse-15, opensuse-15.3, opensuse-tumbleweed, slackware-14.2, ubuntu-bionic-i386, manylinux-2_24-i686, debian-buster-i386, centos-7-i386] + tox_packages_factor: [minimal, standard] + env: + TOX_ENV: docker-${{ matrix.tox_system_factor }}-${{ matrix.tox_packages_factor }} + LOGS_ARTIFACT_NAME: logs-commit-${{ github.sha }}-tox-docker-${{ matrix.tox_system_factor }}-${{ 
matrix.tox_packages_factor }} + DOCKER_TARGETS: configured with-targets with-targets-optional + steps: + - name: Check out SageMath + uses: actions/checkout@v2 + with: + repository: ${{ env.SAGE_REPO }} + ref: ${{ env.SAGE_REF }} + fetch-depth: 2000 + if: env.SAGE_REPO != '' + - name: Check out git-trac-command + uses: actions/checkout@v2 + with: + repository: sagemath/git-trac-command + path: git-trac-command + if: env.SAGE_TRAC_GIT != '' + - name: Check out SageMath from trac.sagemath.org + shell: bash {0} + run: | + git config --global user.email "ci-sage@example.com" + git config --global user.name "ci-sage workflow" + if [ ! -d .git ]; then git init; fi; git remote add trac ${{ env.SAGE_TRAC_GIT }} && x=1 && while [ $x -le 5 ]; do x=$(( $x + 1 )); sleep $(( $RANDOM % 60 + 1 )); if git-trac-command/git-trac fetch $SAGE_TICKET; then git merge FETCH_HEAD || echo "(ignored)"; exit 0; fi; sleep 40; done; exit 1 + if: env.SAGE_TRAC_GIT != '' + - uses: actions/download-artifact@v2 + with: + path: upstream + name: upstream + - name: Install test prerequisites + run: | + sudo DEBIAN_FRONTEND=noninteractive apt-get update + sudo DEBIAN_FRONTEND=noninteractive apt-get install tox python3-setuptools + - name: Update Sage packages from upstream artifact + run: | + (export PATH=$(pwd)/build/bin:$PATH; (cd upstream && bash -x update-pkgs.sh) && sed -i.bak '/upstream/d' .dockerignore && echo "/:toolchain:/i ADD upstream upstream" | sed -i.bak -f - build/bin/write-dockerfile.sh && git diff) + - name: Configure and build Sage distribution within a Docker container + run: | + set -o pipefail; EXTRA_DOCKER_BUILD_ARGS="--build-arg USE_MAKEFLAGS=\"-k V=0 SAGE_NUM_THREADS=3\"" tox -e $TOX_ENV -- $TARGETS 2>&1 | sed "/^configure: notice:/s|^|::warning file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;/^configure: warning:/s|^|::warning file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;/^configure: error:/s|^|::error file=artifacts/$LOGS_ARTIFACT_NAME/config.log::|;" + - name: Copy logs from the Docker image or build container + run: | + mkdir -p "artifacts/$LOGS_ARTIFACT_NAME" + cp -r .tox/$TOX_ENV/Dockerfile .tox/$TOX_ENV/log "artifacts/$LOGS_ARTIFACT_NAME" + if [ -f .tox/$TOX_ENV/Dockertags ]; then CONTAINERS=$(docker create $(tail -1 .tox/$TOX_ENV/Dockertags) /bin/bash || true); fi + if [ -n "$CONTAINERS" ]; then for CONTAINER in $CONTAINERS; do for ARTIFACT in /sage/logs; do docker cp $CONTAINER:$ARTIFACT artifacts/$LOGS_ARTIFACT_NAME && HAVE_LOG=1; done; if [ -n "$HAVE_LOG" ]; then break; fi; done; fi + if: always() + - uses: actions/upload-artifact@v2 + with: + path: artifacts + name: ${{ env.LOGS_ARTIFACT_NAME }} + if: always() + - name: Print out logs for immediate inspection + # and markup the output with GitHub Actions logging commands + run: | + .github/workflows/scan-logs.sh "artifacts/$LOGS_ARTIFACT_NAME" + if: always() diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 265be849..4275bbde 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,50 +1,129 @@ name: tests -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] + +concurrency: + group: >- + ${{ github.workflow }}- + ${{ github.ref_type }}- + ${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true jobs: test: strategy: matrix: distutils: - - stdlib - local python: - - pypy3 - - 3.6 + - pypy-3.7 + - 3.7 + - 3.8 - 3.9 - "3.10" platform: - ubuntu-latest - macos-latest - windows-latest + include: + - platform: ubuntu-latest + python: "3.10" + distutils: 
stdlib runs-on: ${{ matrix.platform }} env: SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils }} + timeout-minutes: 75 steps: - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} + - uses: actions/cache@v3 + id: cache + with: + path: setuptools/tests/config/downloads/*.cfg + key: >- + ${{ hashFiles('setuptools/tests/config/setupcfg_examples.txt') }}- + ${{ hashFiles('setuptools/tests/config/downloads/*.py') }} + - name: Populate download cache + if: steps.cache.outputs.cache-hit != 'true' + working-directory: setuptools/tests/config + run: python -m downloads.preload setupcfg_examples.txt - name: Install tox run: | python -m pip install tox - name: Run tests - run: tox -- --cov-report xml + run: tox + - name: Create coverage report + if: hashFiles('.coverage') != '' # Rudimentary `file.exists()` + run: pipx run coverage xml --ignore-errors - name: Publish coverage - if: false # disabled for #2727 + if: hashFiles('coverage.xml') != '' # Rudimentary `file.exists()` uses: codecov/codecov-action@v1 with: flags: >- # Mark which lines are covered by which envs ${{ runner.os }}, ${{ matrix.python }} - release: + test_cygwin: + runs-on: windows-latest + timeout-minutes: 75 + steps: + - uses: actions/checkout@v2 + - name: Install Cygwin with Python + uses: cygwin/cygwin-install-action@v1 + with: + platform: x86_64 + packages: >- + git, + gcc-core, + python38, + python38-devel, + python38-pip + - name: Install tox + shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} + run: | + python3.8 -m pip install tox + - name: Run tests + shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} + run: | + tox -- --cov-report xml + + integration-test: needs: test - if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') + if: github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && contains(github.ref, 'refs/tags/')) + # To avoid long times and high resource usage, we assume that: + # 1. The setuptools APIs used by packages don't vary too much with OS or + # Python implementation + # 2. Any circumstance for which the previous assumption is not valid is + # already tested via unit tests (or other tests not classified here as + # "integration") + # With that in mind, the integration tests can run for a single setup runs-on: ubuntu-latest + timeout-minutes: 75 + steps: + - uses: actions/checkout@v2 + - name: Install OS-level dependencies + run: | + sudo apt-get update + sudo apt-get install build-essential gfortran libopenblas-dev + - name: Setup Python + uses: actions/setup-python@v2 + with: + # Use a release that is not very new but still have a long life: + python-version: "3.8" + - name: Install tox + run: | + python -m pip install tox + - name: Run integration tests + run: tox -e integration + release: + needs: [test, test_cygwin, integration-test] + if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + timeout-minutes: 75 steps: - uses: actions/checkout@v2 - name: Setup Python @@ -6,7 +6,6 @@ docs/build include lib distribute.egg-info -foo.egg-info setuptools.egg-info .coverage .eggs diff --git a/CHANGES.rst b/CHANGES.rst index 3206647a..5061ecb9 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,680 @@ +v62.1.0 +------- + + +Changes +^^^^^^^ +* #3258: Merge pypa/distutils@5229dad46b. 
+ +Misc +^^^^ +* #3249: Simplified ``package_dir`` obtained via auto-discovery. + + +v62.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #3151: Made ``setup.py develop --user`` install to the user site packages directory even if it is disabled in the current interpreter. + +Changes +^^^^^^^ +* #3153: When resolving requirements use both canonical and normalized names -- by :user:`ldaniluk` +* #3167: Honor unix file mode in ZipFile when installing wheel via ``install_as_egg`` -- by :user:`delijati` + +Misc +^^^^ +* #3088: Fixed duplicated tag with the ``dist-info`` command. +* #3247: Fixed problem preventing ``readme`` specified as dynamic in ``pyproject.toml`` + from being dynamically specified in ``setup.py``. + + +v61.3.1 +------- + + +Misc +^^^^ +* #3233: Included missing test file ``setupcfg_examples.txt`` in ``sdist``. +* #3233: Added script that allows developers to download ``setupcfg_examples.txt`` prior to + running tests. By caching these files it should be possible to run the test suite + offline. + + +v61.3.0 +------- + + +Changes +^^^^^^^ +* #3229: Disabled automatic download of ``trove-classifiers`` to facilitate reproducibility. + +Misc +^^^^ +* #3229: Updated ``pyproject.toml`` validation via ``validate-pyproject`` v0.7.1. +* #3229: New internal tool made available for updating the code responsible for + the validation of ``pyproject.toml``. + This tool can be executed via ``tox -e generate-validation-code``. + + +v61.2.0 +------- + + +Changes +^^^^^^^ +* #3215: Ignored a subgroup of invalid ``pyproject.toml`` files that use the ``[project]`` + table to specify only ``requires-python`` (**transitional**). + + .. warning:: + Please note that future releases of setuptools will halt the build process + if a ``pyproject.toml`` file that does not match doc:`the PyPA Specification + <PyPUG:specifications/declaring-project-metadata>` is given. +* #3215: Updated ``pyproject.toml`` validation, as generated by ``validate-pyproject==0.6.1``. +* #3218: Prevented builds from erroring if the project specifies metadata via + ``pyproject.toml``, but uses other files (e.g. ``setup.py``) to complement it, + without setting ``dynamic`` properly. + + .. important:: + This is a **transitional** behaviour. + Future releases of ``setuptools`` may simply ignore externally set metadata + not backed by ``dynamic`` or even halt the build with an error. +* #3224: Merge changes from pypa/distutils@e1d5c9b1f6 + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3217: Fixed typo in ``pyproject.toml`` example in Quickstart -- by :user:`pablo-cardenas`. + +Misc +^^^^ +* #3223: Fixed missing requirements with environment markers when + ``optional-dependencies`` is set in ``pyproject.toml``. + + +v61.1.1 +------- + + +Misc +^^^^ +* #3212: Fixed missing dependencies when running ``setup.py install``. + Note that calling ``setup.py install`` directly is still deprecated and + will be removed in future versions of ``setuptools``. + Please check the release notes for :ref:`setup_install_deprecation_note`. + + +v61.1.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3206: Changed ``setuptools.convert_path`` to an internal function that is not exposed + as part of setuptools API. + Future releases of ``setuptools`` are likely to remove this function. + +Changes +^^^^^^^ +* #3202: Changed behaviour of auto-discovery to not explicitly expand ``package_dir`` + for flat-layouts and to not use relative paths starting with ``./``. 
+* #3203: Prevented ``pyproject.toml`` parsing from overwriting + ``dist.include_package_data`` explicitly set in ``setup.py`` with default + value. +* #3208: Added a warning for non existing files listed with the ``file`` directive in + ``setup.cfg`` and ``pyproject.toml``. +* #3208: Added a default value for dynamic ``classifiers`` in ``pyproject.toml`` when + files are missing and errors being ignored. +* #3211: Disabled auto-discovery when distribution class has a ``configuration`` + attribute (e.g. when the ``setup.py`` script contains ``setup(..., + configuration=...)``). This is done to ensure extension-only packages created + with ``numpy.distutils.misc_util.Configuration`` are not broken by the safe + guard + behaviour to avoid accidental multiple top-level packages in a flat-layout. + + .. note:: + Users that don't set ``packages``, ``py_modules``, or ``configuration`` are + still likely to observe the auto-discovery behavior, which may halt the + build if the project contains multiple directories and/or multiple Python + files directly under the project root. + + To disable auto-discovery please explicitly set either ``packages`` or + ``py_modules``. Alternatively you can also configure :ref:`custom-discovery`. + + +v61.0.0 +------- + + +Deprecations +^^^^^^^^^^^^ +* #3068: Deprecated ``setuptools.config.read_configuration``, + ``setuptools.config.parse_configuration`` and other functions or classes + from ``setuptools.config``. + + Users that still need to parse and process configuration from ``setup.cfg`` can + import a direct replacement from ``setuptools.config.setupcfg``, however this + module is transitional and might be removed in the future + (the ``setup.cfg`` configuration format itself is likely to be deprecated in the future). + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #2894: If you purposefully want to create an *"empty distribution"*, please be aware + that some Python files (or general folders) might be automatically detected and + included. + + Projects that currently don't specify both ``packages`` and ``py_modules`` in their + configuration and contain extra folders or Python files (not meant for distribution), + might see these files being included in the wheel archive or even experience + the build to fail. + + You can check details about the automatic discovery (and how to configure a + different behaviour) in :doc:`/userguide/package_discovery`. +* #3067: If the file ``pyproject.toml`` exists and it includes project + metadata/config (via ``[project]`` table or ``[tool.setuptools]``), + a series of new behaviors that are not backward compatible may take place: + + - The default value of ``include_package_data`` will be considered to be ``True``. + - Setuptools will attempt to validate the ``pyproject.toml`` file according + to PEP 621 specification. + - The values specified in ``pyproject.toml`` will take precedence over those + specified in ``setup.cfg`` or ``setup.py``. + +Changes +^^^^^^^ +* #2887: **[EXPERIMENTAL]** Added automatic discovery for ``py_modules`` and ``packages`` + -- by :user:`abravalheri`. + + Setuptools will try to find these values assuming that the package uses either + the *src-layout* (a ``src`` directory containing all the packages or modules), + the *flat-layout* (package directories directly under the project root), + or the *single-module* approach (an isolated Python file, directly under + the project root). + + The automatic discovery will also respect layouts that are explicitly + configured using the ``package_dir`` option. 
+ + For backward-compatibility, this behavior will be observed **only if both** + ``py_modules`` **and** ``packages`` **are not set**. + (**Note**: specifying ``ext_modules`` might also prevent auto-discover from + taking place) + + If setuptools detects modules or packages that are not supposed to be in the + distribution, please manually set ``py_modules`` and ``packages`` in your + ``setup.cfg`` or ``setup.py`` file. + If you are using a *flat-layout*, you can also consider switching to + *src-layout*. +* #2887: **[EXPERIMENTAL]** Added automatic configuration for the ``name`` metadata + -- by :user:`abravalheri`. + + Setuptools will adopt the name of the top-level package (or module in the case + of single-module distributions), **only when** ``name`` **is not explicitly + provided**. + + Please note that it is not possible to automatically derive a single name when + the distribution consists of multiple top-level packages or modules. +* #3066: Added vendored dependencies for :pypi:`tomli`, :pypi:`validate-pyproject`. + + These dependencies are used to read ``pyproject.toml`` files and validate them. +* #3067: **[EXPERIMENTAL]** When using ``pyproject.toml`` metadata, + the default value of ``include_package_data`` is changed to ``True``. +* #3068: **[EXPERIMENTAL]** Add support for ``pyproject.toml`` configuration + (as introduced by :pep:`621`). Configuration parameters not covered by + standards are handled in the ``[tool.setuptools]`` sub-table. + + In the future, existing ``setup.cfg`` configuration + may be automatically converted into the ``pyproject.toml`` equivalent before taking effect + (as proposed in #1688). Meanwhile users can use automated tools like + :pypi:`ini2toml` to help in the transition. + + Please note that the legacy backend is not guaranteed to work with + ``pyproject.toml`` configuration. + + -- by :user:`abravalheri` +* #3125: Implicit namespaces (as introduced in :pep:`420`) are now considered by default + during :doc:`package discovery </userguide/package_discovery>`, when + ``setuptools`` configuration and project metadata are added to the + ``pyproject.toml`` file. + + To disable this behaviour, use ``namespaces = False`` when explicitly setting + the ``[tool.setuptools.packages.find]`` section in ``pyproject.toml``. + + This change is backwards compatible and does not affect the behaviour of + configuration done in ``setup.cfg`` or ``setup.py``. +* #3152: **[EXPERIMENTAL]** Added support for ``attr:`` and ``cmdclass`` configurations + in ``setup.cfg`` and ``pyproject.toml`` when ``package_dir`` is implicitly + found via auto-discovery. +* #3178: Postponed importing ``ctypes`` when hiding files on Windows. + This helps to prevent errors in systems that might not have ``libffi`` installed. +* #3179: Merge with pypa/distutils@267dbd25ac + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3172: Added initial documentation about configuring ``setuptools`` via ``pyproject.toml`` + (using standard project metadata). + +Misc +^^^^ +* #3065: Refactored ``setuptools.config`` by separating configuration parsing (specific + to the configuration file format, e.g. ``setup.cfg``) and post-processing + (which includes directives such as ``file:`` that can be used across different + configuration formats). + + +v60.10.0 +-------- + + +Changes +^^^^^^^ +* #2971: Deprecated upload_docs command, to be removed in the future. +* #3137: Use samefile from stdlib, supported on Windows since Python 3.2. 
+* #3170: Adopt nspektr (vendored) to implement Distribution._install_dependencies. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3144: Added documentation on using console_scripts from setup.py, which was previously only shown in setup.cfg -- by :user:`xhlulu` +* #3148: Added clarifications about ``MANIFEST.in``, that include links to PyPUG docs + and more prominent mentions to using a revision control system plugin as an + alternative. +* #3148: Removed mention to ``pkg_resources`` as the recommended way of accessing data + files, in favour of importlib.resources. + Additionally more emphasis was put on the fact that *package data files* reside + **inside** the *package directory* (and therefore should be *read-only*). + +Misc +^^^^ +* #3120: Added workaround for intermittent failures of backend tests on PyPy. + These tests now are marked with `XFAIL + <https://docs.pytest.org/en/stable/how-to/skipping.html>`_, instead of erroring + out directly. +* #3124: Improved configuration for :pypi:`rst-linker` (extension used to build the + changelog). +* #3133: Enhanced isolation of tests using virtual environments - PYTHONPATH is not leaking to spawned subprocesses -- by :user:`befeleme` +* #3147: Added options to provide a pre-built ``setuptools`` wheel or sdist for being + used during tests with virtual environments. + Paths for these pre-built distribution files can now be set via the environment + variables: ``PRE_BUILT_SETUPTOOLS_SDIST`` and ``PRE_BUILT_SETUPTOOLS_WHEEL``. + + +v60.9.3 +------- + + +Misc +^^^^ +* #3093: Repaired automated release process. + + +v60.9.2 +------- + + +Misc +^^^^ +* #3035: When loading distutils from the vendored copy, rewrite ``__name__`` to ensure consistent importing from inside and out. + + +v60.9.1 +------- + + +Misc +^^^^ +* #3102: Prevent vendored importlib_metadata from loading distributions from older importlib_metadata. +* #3103: Fixed issue where string-based entry points would be omitted. +* #3107: Bump importlib_metadata to 4.11.1 addressing issue with parsing requirements in egg-info as found in PyPy. + + +v60.9.0 +------- + + +Changes +^^^^^^^ +* #2876: In the build backend, allow single config settings to be supplied. +* #2993: Removed workaround in distutils hack for get-pip now that pypa/get-pip#137 is closed. +* #3085: Setuptools no longer relies on ``pkg_resources`` for entry point handling. +* #3098: Bump vendored packaging to 21.3. +* Removed bootstrap script. + + +v60.8.2 +------- + + +Misc +^^^^ +* #3091: Make ``concurrent.futures`` import lazy in vendored ``more_itertools`` + package to a avoid importing threading as a side effect (which caused + `gevent/gevent#1865 <https://github.com/gevent/gevent/issues/1865>`__). + -- by :user:`maciejp-ro` + + +v60.8.1 +------- + + +Misc +^^^^ +* #3084: When vendoring jaraco packages, ensure the namespace package is converted to a simple package to support zip importer. + + +v60.8.0 +------- + + +Changes +^^^^^^^ +* #3085: Setuptools now vendors importlib_resources and importlib_metadata and jaraco.text. Setuptools no longer relies on pkg_resources for ensure_directory nor parse_requirements. + + +v60.7.1 +------- + + +Misc +^^^^ +* #3072: Remove lorem_ipsum from jaraco.text when vendored. + + +v60.7.0 +------- + + +Changes +^^^^^^^ +* #3061: Vendored jaraco.text and use line processing from that library in pkg_resources. + +Misc +^^^^ +* #3070: Avoid AttributeError in easy_install.create_home_path when sysconfig.get_config_vars values are not strings. 
+ + +v60.6.0 +------- + + +Changes +^^^^^^^ +* #3043: Merge with pypa/distutils@bb018f1ac3 including consolidated behavior in sysconfig.get_platform (pypa/distutils#104). +* #3057: Don't include optional ``Home-page`` in metadata if no ``url`` is specified. -- by :user:`cdce8p` +* #3062: Merge with pypa/distutils@b53a824ec3 including improved support for lib directories on non-x64 Windows builds. + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #2897: Added documentation about wrapping ``setuptools.build_meta`` in a in-tree + custom backend. This is a :pep:`517`-compliant way of dynamically specifying + build dependencies (e.g. when platform, OS and other markers are not enough). + -- by :user:`abravalheri` +* #3034: Replaced occurrences of the defunct distutils-sig mailing list with pointers + to GitHub Discussions. + -- by :user:`ashemedai` +* #3056: The documentation has stopped suggesting to add ``wheel`` to + :pep:`517` requirements -- by :user:`webknjaz` + +Misc +^^^^ +* #3054: Used Py3 syntax ``super().__init__()`` -- by :user:`imba-tjd` + + +v60.5.4 +------- + + +Misc +^^^^ +* #3009: Remove filtering of distutils warnings. +* #3031: Suppress distutils replacement when building or testing CPython. + + +v60.5.3 +------- + + +Misc +^^^^ +* #3026: Honor sysconfig variables in easy_install. + + +v60.5.2 +------- + + +Misc +^^^^ +* #2993: In _distutils_hack, for get-pip, simulate existence of setuptools. + + +v60.5.1 +------- + + +Misc +^^^^ +* #2918: Correct support for Python 3 native loaders. + + +v60.5.0 +------- + + +Changes +^^^^^^^ +* #2990: Set the ``.origin`` attribute of the ``distutils`` module to the module's ``__file__``. + + +v60.4.0 +------- + + +Changes +^^^^^^^ +* #2839: Removed ``requires`` sorting when installing wheels as an egg dir. +* #2953: Fixed a bug that easy install incorrectly parsed Python 3.10 version string. +* #3006: Fixed startup performance issue of Python interpreter due to imports of + costly modules in ``_distutils_hack`` -- by :user:`tiran` + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #2674: Added link to additional resources on packaging in Quickstart guide +* #3008: "In-tree" Sphinx extension for "favicons" replaced with ``sphinx-favicon``. +* #3008: SVG images (logo, banners, ...) optimised with the help of the ``scour`` + package. + +Misc +^^^^ +* #2862: Added integration tests that focus on building and installing some packages in + the Python ecosystem via ``pip`` -- by :user:`abravalheri` +* #2952: Modified "vendoring" logic to keep license files. +* #2968: Improved isolation for some tests that where inadvertently using the project + root for builds, and therefore creating directories (e.g. ``build``, ``dist``, + ``*.egg-info``) that could interfere with the outcome of other tests + -- by :user:`abravalheri`. +* #2968: Introduced new test fixtures ``venv``, ``venv_without_setuptools``, + ``bare_venv`` that rely on the ``jaraco.envs`` package. + These new test fixtures were also used to remove the (currently problematic) + dependency on the ``pytest_virtualenv`` plugin. +* #2968: Removed ``tmp_src`` test fixture. Previously this fixture was copying all the + files and folders under the project root, including the ``.git`` directory, + which is error prone and increases testing time. 
+ + Since ``tmp_src`` was used to populate virtual environments (installing the + version of ``setuptools`` under test via the source tree), it was replaced by + the new ``setuptools_sdist`` and ``setuptools_wheel`` fixtures (that are build + only once per session testing and can be shared between all the workers for + read-only usage). + + +v60.3.1 +------- + + +Misc +^^^^ +* #3002: Suppress AttributeError when detecting get-pip. + + +v60.3.0 +------- + + +Changes +^^^^^^^ +* #2993: In _distutils_hack, bypass the distutils exception for pip when get-pip is being invoked, because it imports setuptools. + +Misc +^^^^ +* #2989: Merge with pypa/distutils@788cc159. Includes fix for config vars missing from sysconfig. + + +v60.2.0 +------- + + +Changes +^^^^^^^ +* #2974: Setuptools now relies on the Python logging infrastructure to log messages. Instead of using ``distutils.log.*``, use ``logging.getLogger(name).*``. +* #2987: Sync with pypa/distutils@2def21c5d74fdd2fe7996ee4030ac145a9d751bd, including fix for missing get_versions attribute (#2969), more reliance on sysconfig from stdlib. + +Misc +^^^^ +* #2962: Avoid attempting to use local distutils when the presiding version of Setuptools on the path doesn't have one. +* #2983: Restore 'add_shim' as the way to invoke the hook. Avoids compatibility issues between different versions of Setuptools with the distutils local implementation. + + +v60.1.1 +------- + + +Misc +^^^^ +* #2980: Bypass distutils loader when setuptools module is no longer available on sys.path. + + +v60.1.0 +------- + + +Changes +^^^^^^^ +* #2958: In distutils_hack, only add the metadata finder once. In ensure_local_distutils, rely on a context manager for reliable manipulation. +* #2963: Merge with pypa/distutils@a5af364910. Includes revisited fix for pypa/distutils#15 and improved MinGW/Cygwin support from pypa/distutils#77. + + +v60.0.5 +------- + + +Misc +^^^^ +* #2960: Install schemes fall back to default scheme for headers. + + +v60.0.4 +------- + + +Misc +^^^^ +* #2954: Merge with pypa/distutils@eba2bcd310. Adds platsubdir to config vars available for substitution. + + +v60.0.3 +------- + + +Misc +^^^^ +* #2940: Avoid KeyError in distutils hack when pip is imported during ensurepip. + + +v60.0.2 +------- + + +Misc +^^^^ +* #2938: Select 'posix_user' for the scheme unless falling back to stdlib, then use 'unix_user'. + + +v60.0.1 +------- + + +Misc +^^^^ +* #2944: Add support for extended install schemes in easy_install. + + +v60.0.0 +------- + + +Breaking Changes +^^^^^^^^^^^^^^^^ +* #2896: Setuptools once again makes its local copy of distutils the default. To override, set SETUPTOOLS_USE_DISTUTILS=stdlib. + + +v59.8.0 +------- + + +Changes +^^^^^^^ +* #2935: Merge pypa/distutils@460b59f0e68dba17e2465e8dd421bbc14b994d1f. + + +v59.7.0 +------- + + +Changes +^^^^^^^ +* #2930: Require Python 3.7 + + +v59.6.0 +------- + + +Changes +^^^^^^^ +* #2925: Merge with pypa/distutils@92082ee42c including introduction of deprecation warning on Version classes. + + +v59.5.0 +------- + + +Changes +^^^^^^^ +* #2914: Merge with pypa/distutils@8f2df0bf6. + + +v59.4.0 +------- + + +Changes +^^^^^^^ +* #2893: Restore deprecated support for newlines in the Summary field. + + +v59.3.0 +------- + + +Changes +^^^^^^^ +* #2902: Merge with pypa/distutils@85db7a41242. + +Misc +^^^^ +* #2906: In ensure_local_distutils, re-use DistutilsMetaFinder to load the module. Avoids race conditions when _distutils_system_mod is employed. 
+ + v59.2.0 ------- @@ -134,6 +811,8 @@ Documentation changes :user:`abravalheri` +.. _setup_install_deprecation_note: + v58.3.0 ------- @@ -166,7 +845,7 @@ Changes Documentation changes ^^^^^^^^^^^^^^^^^^^^^ -* #2792: Document how the legacy and non-legacy versions are compared, and reference to the `PEP 440 <https://www.python.org/dev/peps/pep-0440/>`_ scheme. +* #2792: Document how the legacy and non-legacy versions are compared, and reference to the PEP 440 scheme. v58.1.0 @@ -233,7 +912,7 @@ v57.5.0 Changes ^^^^^^^ -* #2712: Added implicit globbing support for `[options.data_files]` values. +* #2712: Added implicit globbing support for ``[options.data_files]`` values. Documentation changes ^^^^^^^^^^^^^^^^^^^^^ @@ -305,7 +984,7 @@ Changes ``license_file`` (deprecated) and ``license_files`` options, relative to ``.dist-info``. - by :user:`cdce8p` * #2678: Moved Setuptools' own entry points into declarative config. -* #2680: Vendored `more_itertools <https://pypi.org/project/more-itertools>`_ for Setuptools. +* #2680: Vendored :pypi:`more_itertools` for Setuptools. * #2681: Setuptools own setup.py no longer declares setup_requires, but instead expects wheel to be installed as declared by pyproject.toml. Misc @@ -520,7 +1199,7 @@ Changes * #2481: Define ``create_module()`` and ``exec_module()`` methods in ``VendorImporter`` to get rid of ``ImportWarning`` -- by :user:`hroncok` * #2489: ``pkg_resources`` behavior for zipimport now matches the regular behavior, and finds - ``.egg-info`` (previoulsy would only find ``.dist-info``) -- by :user:`thatch` + ``.egg-info`` (previously would only find ``.dist-info``) -- by :user:`thatch` * #2529: Fixed an issue where version tags may be added multiple times @@ -531,7 +1210,7 @@ v51.2.0 Changes ^^^^^^^ * #2493: Use importlib.import_module() rather than the deprecated loader.load_module() - in pkg_resources namespace delaration -- by :user:`encukou` + in pkg_resources namespace declaration -- by :user:`encukou` Documentation changes ^^^^^^^^^^^^^^^^^^^^^ @@ -1233,7 +1912,7 @@ Breaking Changes * eggs are not supported * no support for the ``allow_hosts`` easy_install option (``index_url``/``find_links`` are still honored) * pip environment variables are honored (and take precedence over easy_install options) -* #1898: Removed the "upload" and "register" commands in favor of `twine <https://pypi.org/p/twine>`_. +* #1898: Removed the "upload" and "register" commands in favor of :pypi:`twine`. Changes ^^^^^^^ @@ -1243,7 +1922,7 @@ Changes * add support for manylinux2010 * fix use of removed 'm' ABI flag in Python 3.8 on Windows * #1861: Fix empty namespace package installation from wheel. -* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like `setuptools_scm <https://pypi.org/project/setuptools_scm>`_ to configure options on the distribution at finalization time. +* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like :pypi:`setuptools_scm` to configure options on the distribution at finalization time. v41.6.0 @@ -2510,7 +3189,7 @@ v26.1.0 ------- * #763: ``pkg_resources.get_default_cache`` now defers to the - `appdirs project <https://pypi.org/project/appdirs>`_ to + :pypi:`appdirs` project to resolve the cache directory. Adds a vendored dependency on appdirs to pkg_resources. @@ -3502,8 +4181,7 @@ process to fail and PyPI uploads no longer accept files for 13.0. * Issue #313: Removed built-in support for subversion. 
Projects wishing to retain support for subversion will need to use a third party library. The - extant implementation is being ported to `setuptools_svn - <https://pypi.org/project/setuptools_svn/>`_. + extant implementation is being ported to :pypi:`setuptools_svn`. * Issue #315: Updated setuptools to hide its own loaded modules during installation of another package. This change will enable setuptools to upgrade (or downgrade) itself even when its own metadata and implementation @@ -4007,8 +4685,7 @@ process to fail and PyPI uploads no longer accept files for 13.0. * Address security vulnerability in SSL match_hostname check as reported in Python #17997. -* Prefer `backports.ssl_match_hostname - <https://pypi.org/project/backports.ssl_match_hostname/>`_ for backport +* Prefer :pypi:`backports.ssl_match_hostname` for backport implementation if present. * Correct NameError in ``ssl_support`` module (``socket.error``). @@ -4381,8 +5058,7 @@ how it parses version numbers. Jython. * Work around Jython #1980 and Jython #1981. * Distribute #334: Provide workaround for packages that reference ``sys.__stdout__`` - such as numpy does. This change should address - `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long + such as numpy does. This change should address pypa/virtualenv#359 as long as the system encoding is UTF-8 or the IO encoding is specified in the environment, i.e.:: @@ -4408,7 +5084,7 @@ how it parses version numbers. * BB Pull Request #14: Honor file permissions in zip files. * Distribute #327: Merged pull request #24 to fix a dependency problem with pip. -* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301. +* Merged pull request #23 to fix pypa/virtualenv#301. * If Sphinx is installed, the ``upload_docs`` command now runs ``build_sphinx`` to produce uploadable documentation. * Distribute #326: ``upload_docs`` provided mangled auth credentials under Python 3. diff --git a/MANIFEST.in b/MANIFEST.in index 3e8f09de..ac3308ed 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -15,3 +15,4 @@ include launcher.c include msvc-build-launcher.cmd include pytest.ini include tox.ini +include setuptools/tests/config/setupcfg_examples.txt @@ -22,7 +22,7 @@ .. image:: https://img.shields.io/readthedocs/setuptools/latest.svg :target: https://setuptools.pypa.io -.. image:: https://img.shields.io/badge/skeleton-2021-informational +.. image:: https://img.shields.io/badge/skeleton-2022-informational :target: https://blog.jaraco.com/skeleton .. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white @@ -31,13 +31,17 @@ .. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme +.. image:: https://img.shields.io/discord/803025117553754132 + :target: https://discord.com/channels/803025117553754132/815945031150993468 + :alt: Discord + See the `Installation Instructions <https://packaging.python.org/installing/>`_ in the Python Packaging User's Guide for instructions on installing, upgrading, and uninstalling Setuptools. -Questions and comments should be directed to the `distutils-sig -mailing list <http://mail.python.org/pipermail/distutils-sig/>`_. +Questions and comments should be directed to `GitHub Discussions +<https://github.com/pypa/setuptools/discussions>`_. 
Bug reports and especially tested patches may be submitted directly to the `bug tracker <https://github.com/pypa/setuptools/issues>`_. @@ -47,7 +51,7 @@ Code of Conduct =============== Everyone interacting in the setuptools project's codebases, issue trackers, -chat rooms, and mailing lists is expected to follow the +chat rooms, and fora is expected to follow the `PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_. diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index f7074162..605a6edc 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -1,18 +1,11 @@ +# don't import any costly modules import sys import os -import re -import importlib -import warnings is_pypy = '__pypy__' in sys.builtin_module_names -warnings.filterwarnings('ignore', - r'.+ distutils\b.+ deprecated', - DeprecationWarning) - - def warn_distutils_present(): if 'distutils' not in sys.modules: return @@ -20,6 +13,7 @@ def warn_distutils_present(): # PyPy for 3.6 unconditionally imports distutils, so bypass the warning # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 return + import warnings warnings.warn( "Distutils was imported before Setuptools, but importing Setuptools " "also replaces the `distutils` module in `sys.modules`. This may lead " @@ -32,8 +26,12 @@ def warn_distutils_present(): def clear_distutils(): if 'distutils' not in sys.modules: return + import warnings warnings.warn("Setuptools is replacing distutils.") - mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + mods = [ + name for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] for name in mods: del sys.modules[name] @@ -42,23 +40,24 @@ def enabled(): """ Allow selection of distutils by environment variable. """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') return which == 'local' def ensure_local_distutils(): + import importlib clear_distutils() # With the DistutilsMetaFinder in place, # perform an import to cause distutils to be # loaded from setuptools._distutils. Ref #2906. - add_shim() - importlib.import_module('distutils') - remove_shim() + with shim(): + importlib.import_module('distutils') # check that submodules load as expected core = importlib.import_module('distutils.core') assert '_distutils' in core.__file__, core.__file__ + assert 'setuptools._distutils.log' not in sys.modules def do_override(): @@ -73,6 +72,14 @@ def do_override(): ensure_local_distutils() +class _TrivialRe: + def __init__(self, *patterns): + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + class DistutilsMetaFinder: def find_spec(self, fullname, path, target=None): if path is not None: @@ -83,18 +90,46 @@ class DistutilsMetaFinder: return method() def spec_for_distutils(self): + if self.is_cpython(): + return + + import importlib import importlib.abc import importlib.util + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. 
+ return + class DistutilsLoader(importlib.abc.Loader): def create_module(self, spec): - return importlib.import_module('setuptools._distutils') + mod.__name__ = 'distutils' + return mod def exec_module(self, module): pass - return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) + + @staticmethod + def is_cpython(): + """ + Suppress supplying distutils for CPython (build and tests). + Ref #2965 and #3007. + """ + return os.path.isfile('pybuilddir.txt') def spec_for_pip(self): """ @@ -106,22 +141,42 @@ class DistutilsMetaFinder: clear_distutils() self.spec_for_distutils = lambda: None - @staticmethod - def pip_imported_during_build(): + @classmethod + def pip_imported_during_build(cls): """ Detect if pip is being imported in a build script. Ref #2355. """ import traceback return any( - frame.f_globals['__file__'].endswith('setup.py') + cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) ) + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. + """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + DISTUTILS_FINDER = DistutilsMetaFinder() def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self): + insert_shim() + + def __exit__(self, exc, value, tb): + remove_shim() + + +def insert_shim(): sys.meta_path.insert(0, DISTUTILS_FINDER) diff --git a/bootstrap.py b/bootstrap.py deleted file mode 100644 index 229b9965..00000000 --- a/bootstrap.py +++ /dev/null @@ -1,7 +0,0 @@ -import warnings - - -msg = "bootstrap.py is no longer needed. Use a PEP-517-compatible builder instead." - - -__name__ == '__main__' and warnings.warn(msg) diff --git a/changelog.d/2902.change.rst b/changelog.d/2902.change.rst deleted file mode 100644 index 37f3daaf..00000000 --- a/changelog.d/2902.change.rst +++ /dev/null @@ -1 +0,0 @@ -Merge with pypa/distutils@85db7a41242. diff --git a/changelog.d/2906.misc.rst b/changelog.d/2906.misc.rst deleted file mode 100644 index 2ec890b4..00000000 --- a/changelog.d/2906.misc.rst +++ /dev/null @@ -1 +0,0 @@ -In ensure_local_distutils, re-use DistutilsMetaFinder to load the module. Avoids race conditions when _distutils_system_mod is employed. diff --git a/changelog.d/3282.misc.rst b/changelog.d/3282.misc.rst new file mode 100644 index 00000000..e7fbec76 --- /dev/null +++ b/changelog.d/3282.misc.rst @@ -0,0 +1 @@ +Added CI cache for ``setup.cfg`` examples used when testing ``setuptools.config``. diff --git a/changelog.d/3299.change.rst b/changelog.d/3299.change.rst new file mode 100644 index 00000000..c84d7f0f --- /dev/null +++ b/changelog.d/3299.change.rst @@ -0,0 +1 @@ +Optional metadata fields are now truly optional. diff --git a/changelog.d/README.rst b/changelog.d/README.rst index 49b4d563..6def76b5 100644 --- a/changelog.d/README.rst +++ b/changelog.d/README.rst @@ -21,8 +21,7 @@ recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -``setuptools`` uses `towncrier <https://pypi.org/project/towncrier/>`_ -for changelog management. +``setuptools`` uses :pypi:`towncrier` for changelog management. To submit a change note about your PR, add a text file into the ``changelog.d/`` folder. 
It should contain an explanation of what applying this PR will change in the way diff --git a/conftest.py b/conftest.py index d5e851fe..2271ec3e 100644 --- a/conftest.py +++ b/conftest.py @@ -1,5 +1,7 @@ import sys +import pytest + pytest_plugins = 'setuptools.tests.fixtures' @@ -9,6 +11,15 @@ def pytest_addoption(parser): "--package_name", action="append", default=[], help="list of package_name to pass to test functions", ) + parser.addoption( + "--integration", action="store_true", default=False, + help="run integration tests (only)" + ) + + +def pytest_configure(config): + config.addinivalue_line("markers", "integration: integration tests") + config.addinivalue_line("markers", "uses_network: tests may try to download files") collect_ignore = [ @@ -21,9 +32,20 @@ collect_ignore = [ 'pkg_resources/tests/data', 'setuptools/_vendor', 'pkg_resources/_vendor', + 'setuptools/config/_validate_pyproject', ] if sys.version_info < (3, 6): collect_ignore.append('docs/conf.py') # uses f-strings collect_ignore.append('pavement.py') + + +@pytest.fixture(autouse=True) +def _skip_integration(request): + running_integration_tests = request.config.getoption("--integration") + is_integration_test = request.node.get_closest_marker("integration") + if running_integration_tests and not is_integration_test: + pytest.skip("running integration tests only") + if not running_integration_tests and is_integration_test: + pytest.skip("skipping integration tests") diff --git a/docs/_ext/_custom_icons.py b/docs/_ext/_custom_icons.py deleted file mode 100644 index 245162c2..00000000 --- a/docs/_ext/_custom_icons.py +++ /dev/null @@ -1,58 +0,0 @@ -"""'In-tree' sphinx extension to add icons/favicons to documentation""" -import os -from sphinx.util.fileutil import copy_asset_file - - -IMAGES_DIR = "_images" # same used by .. image:: and .. picture:: - - -def _prepare_image(pathto, confdir, outdir, icon_attrs): - """Copy icon files to the ``IMAGES_DIR`` and return a modified version of - the icon attributes dict replacing ``file`` with the correct ``href``. 
- """ - icon = icon_attrs.copy() - src = os.path.join(confdir, icon.pop("file")) - if not os.path.exists(src): - raise FileNotFoundError(f"icon {src!r} not found") - - dest = os.path.join(outdir, IMAGES_DIR) - copy_asset_file(src, dest) # already compares if dest exists and is uptodate - - asset_name = os.path.basename(src) - icon["href"] = pathto(f"{IMAGES_DIR}/{asset_name}", resource=True) - return icon - - -def _link_tag(attrs): - return "<link " + " ".join(f'{k}="{v}"' for k, v in attrs.items()) + "/>" - - -def _add_icons(app, _pagename, _templatename, context, doctree): - """Add multiple "favicons", not limited to PNG/ICO files""" - # https://evilmartians.com/chronicles/how-to-favicon-in-2021-six-files-that-fit-most-needs - # https://caniuse.com/link-icon-svg - try: - pathto = context['pathto'] - except KeyError as ex: - msg = f"{__name__} extension is supposed to be call in HTML contexts" - raise ValueError(msg) from ex - - if doctree and "icons" in app.config: - icons = [ - _prepare_image(pathto, app.confdir, app.outdir, icon) - for icon in app.config["icons"] - ] - context["metatags"] += "\n".join(_link_tag(attrs) for attrs in icons) - - -def setup(app): - images_dir = os.path.join(app.outdir, IMAGES_DIR) - os.makedirs(images_dir, exist_ok=True) - - app.add_config_value("icons", None, "html") - app.connect("html-page-context", _add_icons) - - return { - 'parallel_read_safe': True, - 'parallel_write_safe': True, - } diff --git a/docs/images/README.rst b/docs/artwork.rst index 55a5a602..907e62a6 100644 --- a/docs/images/README.rst +++ b/docs/artwork.rst @@ -2,7 +2,7 @@ Artwork ======= -.. figure:: logo-over-white.svg +.. figure:: images/logo-over-white.svg :align: center Setuptools logo, designed in 2021 by `Anderson Bravalheri`_ @@ -35,7 +35,7 @@ on the circumstances: The following image illustrate these alternatives: -.. image:: logo-demo.svg +.. image:: images/logo-demo.svg :align: center Please refer to the SVG files in the `setuptools repository`_ for the specific @@ -107,7 +107,7 @@ used by the setuptools software (MIT): SETUPTOOLS PROJECT. Whenever possible, please make the image a link to -https://github.com/pypa/setuptools. +https://github.com/pypa/setuptools or https://setuptools.pypa.io. .. _Anderson Bravalheri: https://github.com/abravalheri diff --git a/docs/build_meta.rst b/docs/build_meta.rst index 27df70a2..57aea986 100644 --- a/docs/build_meta.rst +++ b/docs/build_meta.rst @@ -9,29 +9,29 @@ Python packaging has come `a long way <https://bernat.tech/posts/pep-517-518/>`_ The traditional ``setuptools`` way of packaging Python modules uses a ``setup()`` function within the ``setup.py`` script. Commands such as -``python setup.py bdist`` or ``python setup.py bdist_wheel`` generate a -distribution bundle and ``python setup.py install`` installs the distribution. -This interface makes it difficult to choose other packaging tools without an +``python setup.py bdist`` or ``python setup.py bdist_wheel`` generate a +distribution bundle and ``python setup.py install`` installs the distribution. +This interface makes it difficult to choose other packaging tools without an overhaul. Because ``setup.py`` scripts allowed for arbitrary execution, it proved difficult to provide a reliable user experience across environments and history. `PEP 517 <https://www.python.org/dev/peps/pep-0517/>`_ therefore came to -rescue and specified a new standard to +rescue and specified a new standard to package and distribute Python modules. 
Under PEP 517: a ``pyproject.toml`` file is used to specify what program to use - for generating distribution. + for generating distribution. - Then, two functions provided by the program, ``build_wheel(directory: str)`` - and ``build_sdist(directory: str)`` create the distribution bundle at the - specified ``directory``. The program is free to use its own configuration - script or extend the ``.toml`` file. + Then, two functions provided by the program, ``build_wheel(directory: str)`` + and ``build_sdist(directory: str)`` create the distribution bundle at the + specified ``directory``. The program is free to use its own configuration + script or extend the ``.toml`` file. Lastly, ``pip install *.whl`` or ``pip install *.tar.gz`` does the actual installation. If ``*.whl`` is available, ``pip`` will go ahead and copy the files into ``site-packages`` directory. If not, ``pip`` will look at - ``pyproject.toml`` and decide what program to use to 'build from source' + ``pyproject.toml`` and decide what program to use to 'build from source' (the default is ``setuptools``) With this standard, switching between packaging tools becomes a lot easier. ``build_meta`` @@ -48,17 +48,18 @@ scripts, a ``pyproject.toml`` file and a ``setup.cfg`` file:: setup.cfg meowpkg/__init__.py -The pyproject.toml file is required to specify the build system (i.e. what is -being used to package your scripts and install from source). To use it with +The pyproject.toml file is required to specify the build system (i.e. what is +being used to package your scripts and install from source). To use it with setuptools, the content would be:: [build-system] - requires = ["setuptools", "wheel"] + requires = ["setuptools"] build-backend = "setuptools.build_meta" The ``setuptools`` package implements the ``build_sdist`` command and the ``wheel`` package implements the ``build_wheel`` -command; both are required to be compliant with PEP 517. +command; the latter is a dependency of the former +exposed via :pep:`517` hooks. Use ``setuptools``' :ref:`declarative config <declarative config>` to specify the package information:: @@ -67,17 +68,19 @@ specify the package information:: name = meowpkg version = 0.0.1 description = a package that meows - + [options] packages = find: +.. _building: + Now generate the distribution. To build the package, use `PyPA build <https://pypa-build.readthedocs.io/en/latest/>`_:: $ pip install -q build $ python -m build -And now it's done! The ``.whl`` file and ``.tar.gz`` can then be distributed +And now it's done! The ``.whl`` file and ``.tar.gz`` can then be distributed and installed:: dist/ @@ -89,3 +92,85 @@ and installed:: or:: $ pip install dist/meowpkg-0.0.1.tar.gz + +Dynamic build dependencies and other ``build_meta`` tweaks +---------------------------------------------------------- + +With the changes introduced by :pep:`517` and :pep:`518`, the +``setup_requires`` configuration field was made deprecated in ``setup.cfg`` and +``setup.py``, in favour of directly listing build dependencies in the +``requires`` field of the ``build-system`` table of ``pyproject.toml``. +This approach has a series of advantages and gives package managers and +installers the ability to inspect in advance the build requirements and +perform a series of optimisations. + +However some package authors might still need to dynamically inspect the final +users machine before deciding these requirements. 
One way of doing that, as
+specified by :pep:`517`, is to "tweak" ``setuptools.build_meta`` by using an
+:pep:`in-tree backend <517#in-tree-build-backends>`.
+
+.. tip:: Before implementing an *in-tree* backend, have a look at
+   :pep:`PEP 508 <508#environment-markers>`. Most of the time, dependencies
+   with **environment markers** are enough to differentiate operating systems
+   and platforms.
+
+If you add the following configuration to your ``pyproject.toml``:
+
+
+.. code-block:: toml
+
+    [build-system]
+    requires = ["setuptools", "wheel"]
+    build-backend = "backend"
+    backend-path = ["_custom_build"]
+
+
+then you should be able to implement a thin wrapper around ``build_meta`` in
+the ``_custom_build/backend.py`` file, as shown in the following example:
+
+.. code-block:: python
+
+    from setuptools import build_meta as _orig
+
+    prepare_metadata_for_build_wheel = _orig.prepare_metadata_for_build_wheel
+    build_wheel = _orig.build_wheel
+    build_sdist = _orig.build_sdist
+
+
+    def get_requires_for_build_wheel(config_settings=None):
+        return _orig.get_requires_for_build_wheel(config_settings) + [...]
+
+
+    def get_requires_for_build_sdist(config_settings=None):
+        return _orig.get_requires_for_build_sdist(config_settings) + [...]
+
+
+Note that you can override any of the functions specified in :pep:`PEP 517
+<517#build-backend-interface>`, not only the ones responsible for gathering
+requirements.
+
+.. important:: Make sure your backend script is included in the :doc:`source
+   distribution </userguide/distribution>`, otherwise the build will fail.
+   This can be done by using an SCM_/VCS_ plugin (like :pypi:`setuptools-scm`
+   and :pypi:`setuptools-svn`), or by correctly setting up :ref:`MANIFEST.in
+   <manifest>`.
+
+   If this is the first time you are using a customised backend, please have a
+   look at the generated ``.tar.gz`` and ``.whl``.
+   On POSIX systems that can be done with ``tar -tf dist/*.tar.gz``
+   and ``unzip -l dist/*.whl``.
+   On Windows systems you can rename the ``.whl`` to ``.zip`` to be able to
+   inspect it in the file explorer, and use the same ``tar`` command in a
+   command prompt (alternatively there are GUI programs like `7-zip`_ that
+   handle ``.tar.gz``).
+
+   In general the backend script should be present in the ``.tar.gz`` (so the
+   project can be built from source) but not in the ``.whl`` (otherwise the
+   backend script would end up being distributed alongside your package).
+   See ":doc:`/userguide/package_discovery`" for more details about package
+   files.
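+
+For illustration purposes only, one of the ``[...]`` placeholders above could
+be replaced by a check that environment markers cannot express, such as
+probing the machine for an already installed tool (the ``cmake`` requirement
+below is merely a hypothetical stand-in, not a recommendation):
+
+.. code-block:: python
+
+    # Illustrative sketch only -- the extra "cmake" requirement is just a
+    # hypothetical stand-in for a dependency that must be decided at build time.
+    import shutil
+
+    from setuptools import build_meta as _orig
+
+
+    def get_requires_for_build_wheel(config_settings=None):
+        # Request the PyPI "cmake" package only when no cmake executable is
+        # already available on the machine running the build.
+        extra = [] if shutil.which("cmake") else ["cmake"]
+        return _orig.get_requires_for_build_wheel(config_settings) + extra
+
+
+.. _SCM: https://en.wikipedia.org/wiki/Software_configuration_management
+.. _VCS: https://en.wikipedia.org/wiki/Version_control
+.. 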
_7-zip: https://www.7-zip.org diff --git a/docs/conf.py b/docs/conf.py index 3cc8e35b..4ebb521c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,3 @@ -import os -import sys - extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker'] master_doc = "index" @@ -13,7 +10,11 @@ link_files = { ), replace=[ dict( - pattern=r'(Issue )?#(?P<issue>\d+)', + pattern=r'(?<!\w)PR #(?P<pull>\d+)', + url='{package_url}/pull/{pull}', + ), + dict( + pattern=r'(?<!\w)(Issue )?#(?P<issue>\d+)', url='{package_url}/issues/{issue}', ), dict( @@ -57,7 +58,7 @@ link_files = { url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst', ), dict( - pattern=r'PEP[- ](?P<pep_number>\d+)', + pattern=r'(?<![`/\w])PEP[- ](?P<pep_number>\d+)', url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', ), dict( @@ -65,8 +66,12 @@ link_files = { url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}', ), dict( - pattern=r'pypa/distutils#(?P<distutils>\d+)', - url='{GH}/pypa/distutils/issues/{distutils}', + pattern=r'pypa/(?P<issue_repo>[\-\.\w]+)#(?P<issue_number>\d+)', + url='{GH}/pypa/{issue_repo}/issues/{issue_number}', + ), + dict( + pattern=r'pypa/(?P<commit_repo>[\-\.\w]+)@(?P<commit_number>[\da-f]+)', + url='{GH}/pypa/{commit_repo}/commit/{commit_number}', ), dict( pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n', @@ -92,9 +97,15 @@ intersphinx_mapping.update({ # Add support for linking usernames github_url = 'https://github.com' +github_repo_org = 'pypa' +github_repo_name = 'setuptools' +github_repo_slug = f'{github_repo_org}/{github_repo_name}' +github_repo_url = f'{github_url}/{github_repo_slug}' github_sponsors_url = f'{github_url}/sponsors' extlinks = { 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323 + 'pypi': ('https://pypi.org/project/%s', '%s'), # noqa: WPS323 + 'wiki': ('https://wikipedia.org/wiki/%s', '%s'), # noqa: WPS323 } extensions += ['sphinx.ext.extlinks'] @@ -161,8 +172,8 @@ nitpick_ignore = [ # Allow linking objects on other Sphinx sites seamlessly: intersphinx_mapping.update( - python=('https://docs.python.org/3', None), python2=('https://docs.python.org/2', None), + python=('https://docs.python.org/3', None), ) # Add support for the unreleased "next-version" change notes @@ -175,26 +186,30 @@ towncrier_draft_include_empty = False extensions += ['jaraco.tidelift'] # Add icons (aka "favicons") to documentation -sys.path.append(os.path.join(os.path.dirname(__file__), '_ext')) -extensions += ['_custom_icons'] +extensions += ['sphinx-favicon'] +html_static_path = ['images'] # should contain the folder with icons # List of dicts with <link> HTML attributes -# as defined in https://developer.mozilla.org/en-US/docs/Web/HTML/Element/link -# except that ``file`` gets replaced with the correct ``href`` -icons = [ +# static-file points to files in the html_static_path (href is computed) +favicons = [ { # "Catch-all" goes first, otherwise some browsers will overwrite "rel": "icon", "type": "image/svg+xml", - "file": "images/logo-symbol-only.svg", + "static-file": "logo-symbol-only.svg", "sizes": "any" }, { # Version with thicker strokes for better visibility at smaller sizes "rel": "icon", "type": "image/svg+xml", - "file": "images/favicon.svg", + "static-file": "favicon.svg", "sizes": "16x16 24x24 32x32 48x48" }, # rel="apple-touch-icon" does not support SVG yet ] intersphinx_mapping['pip'] = 'https://pip.pypa.io/en/latest', None +intersphinx_mapping['PyPUG'] = ('https://packaging.python.org/en/latest/', None) +intersphinx_mapping['packaging'] = 
('https://packaging.pypa.io/en/latest/', None)
+intersphinx_mapping['importlib-resources'] = (
+    'https://importlib-resources.readthedocs.io/en/latest', None
+)
diff --git a/docs/deprecated/distutils-legacy.rst b/docs/deprecated/distutils-legacy.rst
index 94104fe8..e73cdff5 100644
--- a/docs/deprecated/distutils-legacy.rst
+++ b/docs/deprecated/distutils-legacy.rst
@@ -3,11 +3,10 @@ Porting from Distutils
 
 Setuptools and the PyPA have a `stated goal <https://github.com/pypa/packaging-problems/issues/127>`_ to make Setuptools the reference API for distutils.
 
-Since the 49.1.2 release, Setuptools includes a local, vendored copy of distutils (from late copies of CPython) that is disabled by default. To enable the use of this copy of distutils when invoking setuptools, set the enviroment variable:
+Since the 60.0.0 release, Setuptools includes a local, vendored copy of distutils (from late copies of CPython) that is enabled by default. To disable the use of this copy of distutils when invoking setuptools, set the environment variable:
 
-    SETUPTOOLS_USE_DISTUTILS=local
+    SETUPTOOLS_USE_DISTUTILS=stdlib
 
-This behavior is planned to become the default.
 
 Prefer Setuptools
 -----------------
@@ -20,12 +19,15 @@ As Distutils is deprecated, any usage of functions or objects from distutils is
 
 ``distutils.command.{build_clib,build_ext,build_py,sdist}`` → ``setuptools.command.*``
 
-``distutils.log`` → (no replacement yet)
+``distutils.log`` → :mod:`logging` (standard library)
 
-``distutils.version.*`` → ``packaging.version.*``
+``distutils.version.*`` → :doc:`packaging.version.* <packaging:version>`
 
 ``distutils.errors.*`` → ``setuptools.errors.*`` [#errors]_
 
+
+Migration advice is also provided by :pep:`PEP 632 <632#migration-advice>`.
+
 If a project relies on uses of ``distutils`` that do not have a suitable replacement above, please search the `Setuptools issue tracker <https://github.com/pypa/setuptools/issues/>`_ and file a request, describing the use-case so that Setuptools' maintainers can investigate. Please provide enough detail to help the maintainers understand how distutils is used, what value it provides, and why that behavior should be supported.
 
diff --git a/docs/deprecated/easy_install.rst b/docs/deprecated/easy_install.rst
index 76c3f608..3cf3bea9 100644
--- a/docs/deprecated/easy_install.rst
+++ b/docs/deprecated/easy_install.rst
@@ -34,7 +34,7 @@ Using "Easy Install"
 Installing "Easy Install"
 -------------------------
 
-Please see the `setuptools PyPI page <https://pypi.org/project/setuptools/>`_
+Please see the :pypi:`setuptools` page on the package index
 for download links and basic installation instructions for each of the
 supported platforms.
 
@@ -1020,10 +1020,7 @@ of the User installation scheme. "virtualenv" provides a version of ``easy_inst
 scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
 that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
 
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: https://pypi.org/project/virtualenv/
-
+Please refer to the :pypi:`virtualenv` documentation for more details.
 
 
 Package Index "API"
diff --git a/docs/deprecated/functionalities.rst b/docs/deprecated/functionalities.rst
index c6ea83b3..7213c5d6 100644
--- a/docs/deprecated/functionalities.rst
+++ b/docs/deprecated/functionalities.rst
@@ -30,4 +30,4 @@ invoked via symlinks.
They *must* be invoked using their original filename, in order to ensure that, once running, ``pkg_resources`` will know what project and version is in use. The header script will check this and exit with an error if the ``.egg`` file has been renamed or is invoked via a symlink that -changes its base name.
\ No newline at end of file +changes its base name. diff --git a/docs/deprecated/index.rst b/docs/deprecated/index.rst index ce2ac006..59fc7bef 100644 --- a/docs/deprecated/index.rst +++ b/docs/deprecated/index.rst @@ -13,7 +13,6 @@ objectives. .. toctree:: :maxdepth: 1 - python3 python_eggs easy_install distutils/index diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst index f29c1a80..d2cf1592 100644 --- a/docs/development/developer-guide.rst +++ b/docs/development/developer-guide.rst @@ -25,12 +25,12 @@ Setuptools is maintained primarily in GitHub at `this home Python Packaging Authority (PyPA) with several core contributors. All bugs for Setuptools are filed and the canonical source is maintained in GitHub. -User support and discussions are done through the issue tracker (for specific) -issues, through the `distutils-sig mailing list <https://mail.python.org/mailman3/lists/distutils-sig.python.org/>`_, or on IRC (Freenode) at -#pypa. +User support and discussions are done through +`GitHub Discussions <https://github.com/pypa/setuptools/discussions>`_, +or the issue tracker (for specific issues). -Discussions about development happen on the distutils-sig mailing list or on -`Gitter <https://gitter.im/pypa/setuptools>`_. +Discussions about development happen on GitHub Discussions or +the ``setuptools`` channel on `PyPA Discord <https://discord.com/invite/pypa>`_. ----------------- Authoring Tickets @@ -125,12 +125,9 @@ cannot declare dependencies other than through ``setuptools/_vendor/vendored.txt`` and ``pkg_resources/_vendor/vendored.txt``. -All the dependencies specified in these files are "vendorized" using Paver_, a -simple Python-based project scripting and task running tool. +All the dependencies specified in these files are "vendorized" using a +simple Python script ``tools/vendor.py``. -To refresh the dependencies, you can run the following command (defined in -``pavement.py``):: +To refresh the dependencies, run the following command:: - $ paver update_vendored - -.. 
_Paver: https://pythonhosted.org/Paver/ + $ tox -e vendor diff --git a/docs/images/banner-640x320.svg b/docs/images/banner-640x320.svg index 8222f645..4e908ea1 100644 --- a/docs/images/banner-640x320.svg +++ b/docs/images/banner-640x320.svg @@ -1,101 +1,36 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 169.33333 84.666662" - height="320" - width="640"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="matrix(2.0252536,0,0,2.0252536,-0.20252404,-105.4321)" - id="g848"> - <g - id="layer1" - transform="matrix(0.93437246,0,0,0.93437246,0.32003559,-27.205658)"> - <g - id="g1751" - transform="matrix(0.45669594,0,0,0.45669594,-11.041899,60.847347)"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 
0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 
0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - <g - id="g1747" - transform="translate(0,-23.386941)"> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1713" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1715" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 32.035622,126.47213 h 
4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1717" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1719" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1721" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1724" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1726" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1728" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1730" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1732" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 
-0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="640" height="320" version="1.1" viewBox="0 0 169.33 84.667" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="g848" transform="matrix(2.0253 0 0 2.0253 -.20252 -105.43)"> + <g id="layer1" transform="matrix(.93437 0 0 .93437 .32004 -27.206)"> + <g id="g1751" transform="matrix(.4567 0 0 .4567 -11.042 60.847)"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 
0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#336790" style="paint-order:markers fill stroke"/> + </g> + <g id="g1747" transform="translate(0 -23.387)" 
stroke-width=".22458"> + <g id="text1023" fill="#336790" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" 
style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </g> + </g> </svg> diff --git a/docs/images/banner-negative-640x320.svg b/docs/images/banner-negative-640x320.svg index fd5535fd..d45698ed 100644 --- a/docs/images/banner-negative-640x320.svg +++ b/docs/images/banner-negative-640x320.svg @@ -1,109 +1,37 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 169.33333 84.666662" - height="320" - width="640"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="matrix(2.0252536,0,0,2.0252536,-0.20252404,-105.4321)" - id="g848"> - <rect - ry="0" - y="52.058716" - x="0.1" - height="41.805462" - width="83.610924" - id="rect830" - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.48264033;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="layer1" - transform="matrix(0.93437246,0,0,0.93437246,0.32003559,-27.205658)"> - <g - 
id="g1751" - transform="matrix(0.45669594,0,0,0.45669594,-11.041899,60.847347)"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 
0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 
-0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - <g - id="g1747" - transform="translate(0,-23.386941)"> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1713" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1715" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1717" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1719" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 
0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1721" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1724" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1726" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1728" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1730" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1732" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="640" height="320" version="1.1" viewBox="0 0 169.33 84.667" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + 
</rdf:RDF> + </metadata> + <g id="g848" transform="matrix(2.0253 0 0 2.0253 -.20252 -105.43)"> + <rect id="rect830" x=".1" y="52.059" width="83.611" height="41.805" ry="0" fill="#336790" style="paint-order:markers fill stroke"/> + <g id="layer1" transform="matrix(.93437 0 0 .93437 .32004 -27.206)"> + <g id="g1751" transform="matrix(.4567 0 0 .4567 -11.042 60.847)"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 
2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#fff" style="paint-order:markers fill stroke"/> + </g> + <g id="g1747" transform="translate(0 -23.387)" stroke-width=".22458"> + <g id="text1023" fill="#fff" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" 
d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path 
id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </g> + </g> </svg> diff --git a/docs/images/favicon.svg b/docs/images/favicon.svg index 3ac5daf9..a1d31916 100644 --- a/docs/images/favicon.svg +++ b/docs/images/favicon.svg @@ -1,55 +1,23 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - width="32" - height="32" - viewBox="0 0 8.4666666 8.4666664" - version="1.1" - id="svg1021"> - <style> - #snake { fill: #336790 } +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="32" height="32" version="1.1" viewBox="0 0 8.4667 8.4667" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <style>#snake { fill: #336790 } #background { fill: none } #hammer { fill:#e5b62f } @media (prefers-color-scheme: dark) { #snake { fill: #FFFFFF } #background { fill: #336790 } - } - </style> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <rect - ry="0.50735909" - y="2.7838135e-07" - x="0" - height="8.4666662" - width="8.4666662" - id="background" - style="fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.40613541;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:normal" /> - <path - transform="scale(0.26458333)" - id="snake" - d="m 19.164062,0.02929688 c -0.881935,0.01466502 -1.571827,0.37352651 -1.99414,0.6875 C 16.907205,0.91211867 16.752096,1.0429315 16.609375,1.1152344 16.466654,1.1875372 16.31191,1.2315625 15.966797,1.2324219 15.621764,1.2332969 15.46711,1.1927388 15.324219,1.1210938 15.181328,1.0494487 15.01967,0.91676115 14.755859,0.72265625 14.190346,0.30656609 13.13376,-0.19174342 11.826172,0.16210938 10.518579,0.51596429 9.8526853,1.4825431 9.5742188,2.1269531 9.4443428,2.4275046 9.3803892,2.6198969 9.2929688,2.7539062 9.2055521,2.8879154 9.0894301,3.0005473 8.7910156,3.1738281 8.4925766,3.3471244 8.3394228,3.3869479 8.1796875,3.3964844 8.0199533,3.4060201 7.817558,3.3721151 
7.4921875,3.3359375 6.7946621,3.2583778 5.6274847,3.3579602 4.671875,4.3183594 3.7163191,5.2786708 3.6240093,6.4410954 3.7050781,7.1386719 3.7428658,7.4638927 3.7801962,7.6645254 3.7714844,7.8242188 3.7627726,7.9839114 3.7225861,8.1382015 3.5507812,8.4375 3.3789212,8.7368839 3.2640613,8.8515655 3.1308594,8.9394531 2.9976575,9.0273423 2.8096693,9.0951899 2.5097656,9.2265625 1.8671081,9.5080729 0.9001495,10.17675 0.55273438,11.486328 c -0.34741581,1.309585 0.15757059,2.366613 0.57617182,2.929688 0.1953185,0.262718 0.3261338,0.419778 0.3984376,0.5625 0.072304,0.142722 0.1182822,0.295513 0.1191406,0.640625 8.427e-4,0.345044 -0.039708,0.49979 -0.1113282,0.642578 -0.071621,0.142787 -0.2043154,0.298664 -0.3984374,0.5625 -0.41616724,0.565615 -0.91437965,1.629985 -0.56054692,2.9375 0.35382917,1.307501 1.31830112,1.965598 1.96289062,2.24414 0.3005507,0.12988 0.4929425,0.199689 0.6269531,0.28711 0.1340107,0.08742 0.2485919,0.203538 0.421875,0.501953 0.1733076,0.298465 0.2111682,0.453661 0.2207032,0.613281 0.00952,0.159621 -0.022405,0.360177 -0.058594,0.685547 -0.07756,0.697526 0.021947,1.864663 0.9824219,2.820312 0.9605356,0.955764 2.1216246,1.047773 2.8183593,0.966797 0.32479,-0.03776 0.5277723,-0.07552 0.6875,-0.06641 0.1597267,0.0091 0.3195734,0.05069 0.6191407,0.222656 0.2993839,0.171859 0.4082065,0.278908 0.4960937,0.41211 0.087889,0.133202 0.1557368,0.329002 0.2871094,0.628906 0.281537,0.642705 0.9502582,1.599867 2.2597662,1.947266 0.680971,0.180653 1.392901,0.100853 1.947265,-0.05078 0.277183,-0.07582 0.513828,-0.171379 0.701172,-0.273437 0.09367,-0.05103 0.176325,-0.100802 0.251953,-0.169922 0.07563,-0.06912 0.183975,-0.159093 0.177735,-0.382812 -0.0045,-0.159285 -0.156589,-0.338321 -0.265625,-0.384766 -0.109037,-0.04644 -0.180796,-0.04758 -0.251953,-0.05078 -0.142315,-0.0064 -0.282626,0.0029 -0.449219,0 -0.333183,-0.0058 -0.736522,-0.05383 -1.144531,-0.310547 C 11.961972,28.832785 12.146017,28.67427 11.892578,28.095702 11.682008,27.615009 11.24737,26.574655 10.076172,25.902344 8.9048936,25.22998 7.7923475,25.376961 7.2714844,25.4375 6.9834745,25.47098 6.8742171,25.49598 6.8496094,25.49023 6.8250014,25.48453 6.7206857,25.43228 6.4589844,25.171871 6.1973439,24.911525 6.1482043,24.81292 6.1425781,24.789058 6.1369481,24.765208 6.1534621,24.647992 6.1855471,24.359371 6.2435513,23.837725 6.3909265,22.730177 5.7128906,21.5625 5.0348608,20.39483 3.9957419,19.962224 3.5136719,19.753906 3.24718,19.638746 3.140894,19.600646 3.1230469,19.583984 3.1051998,19.567322 3.0417536,19.475517 2.9453125,19.119141 2.8488238,18.762589 2.8560221,18.64966 2.8632812,18.625 c 0.00726,-0.02464 0.080382,-0.114475 0.2519532,-0.347656 0.3105092,-0.422022 0.9916454,-1.313449 0.9882812,-2.664063 -0.00329,-1.35063 -0.6909248,-2.237226 -1.0039062,-2.658203 -0.1730139,-0.232717 -0.2427543,-0.323747 -0.25,-0.347656 -0.00725,-0.02393 -0.014608,-0.137235 0.080078,-0.494141 0.094621,-0.356685 0.1519442,-0.44794 0.1699219,-0.464843 0.017979,-0.01689 0.1265424,-0.05338 0.3925781,-0.169922 C 3.9728946,11.267948 5.0112797,10.833303 5.6835938,9.6621094 6.355949,8.4908288 6.2089761,7.378288 6.1484375,6.8574219 6.1149868,6.569485 6.0918872,6.4621641 6.0976562,6.4375 6.1034389,6.4128197 6.1536341,6.3066527 6.4140625,6.0449219 6.6746118,5.7830724 6.7748036,5.7342756 6.7988281,5.7285156 c 0.024038,-0.00574 0.131793,0.012875 0.4199219,0.044922 0.5206322,0.057887 1.6386324,0.1977607 2.806641,-0.4804687 1.167821,-0.6781221 1.59277,-1.7120003 1.800781,-2.1933594 0.114973,-0.2660648 0.160492,-0.3722564 0.177734,-0.390625 0.01723,-0.018368 
0.108378,-0.081269 0.464844,-0.1777344 0.356564,-0.096492 0.469639,-0.087296 0.494141,-0.080078 0.02449,0.00722 0.10662,0.078405 0.339843,0.25 0.422026,0.3105082 1.313454,0.9857845 2.664063,0.9824219 1.350536,-0.00337 2.245394,-0.6833784 2.666015,-0.9960938 0.232729,-0.1730249 0.313793,-0.2446401 0.337891,-0.2519531 0.02411,-0.00731 0.139256,-0.012632 0.496094,0.082031 0.356761,0.094642 0.447453,0.1515103 0.464844,0.1699219 0.01739,0.01841 0.0594,0.1249453 0.175781,0.390625 0.210633,0.4808426 0.645245,1.5132539 1.816406,2.1855469 1.171199,0.6723126 2.283682,0.5253967 2.804688,0.4648437 0.288015,-0.033487 0.397268,-0.050663 0.421875,-0.044922 0.0246,0.00574 0.12892,0.057957 0.390625,0.3183593 0.261702,0.2604019 0.312739,0.3590525 0.318359,0.3828125 0.0056,0.023773 -0.02064,0.1333266 -0.05273,0.421875 -0.05798,0.5214747 -0.197598,1.636956 0.480468,2.8046875 0.67812,1.1678251 1.717856,1.5927721 2.199219,1.8007811 0.266094,0.114985 0.374121,0.152612 0.392578,0.169922 0.01844,0.01731 0.07933,0.116243 0.175782,0.472657 0.09648,0.356507 0.08927,0.469616 0.08203,0.49414 -0.0073,0.02453 -0.08032,0.106568 -0.251953,0.339844 -0.310507,0.422024 -0.991642,1.313451 -0.988282,2.664062 0.0034,1.35061 0.691271,2.245503 1.003907,2.666016 0.172981,0.232672 0.242709,0.315768 0.25,0.339844 0.0073,0.02407 0.01461,0.137249 -0.08008,0.49414 -0.09465,0.356776 -0.159233,0.447355 -0.177734,0.464844 -0.01848,0.0175 -0.117205,0.06139 -0.382812,0.177734 -0.480712,0.210566 -1.521049,0.645216 -2.19336,1.816407 -0.672341,1.171256 -0.525378,2.283848 -0.464844,2.804687 0.03349,0.288015 0.05652,0.395314 0.05078,0.419922 -0.0057,0.02461 -0.056,0.123066 -0.316406,0.384766 -0.260376,0.261672 -0.360852,0.310752 -0.384766,0.316406 -0.02392,0.0057 -0.131385,-0.01088 -0.419922,-0.04297 -0.521658,-0.05801 -1.637009,-0.19952 -2.804688,0.478516 -1.167626,0.678015 -1.600215,1.718958 -1.808593,2.201172 -0.09505,0.219968 -0.141462,0.345002 -0.16211,0.375 -0.0078,0.01134 -0.06163,0.02709 -0.07422,0.03711 -0.02506,-0.01043 -0.04775,-0.01936 -0.07422,-0.0293 -0.399013,-0.150123 -0.773529,-0.126862 -1.033203,0.0293 -0.259676,0.156166 -0.408366,0.345158 -0.658203,0.53125 -0.224973,0.167577 -0.423343,0.259767 -0.626954,0.421875 -0.107803,0.08583 -0.131342,0.277429 -0.199218,0.427735 -0.01406,0.0015 -0.03043,0.0072 -0.0293,0.0078 0.0011,-5.19e-4 0.0011,-0.0012 -0.02148,-0.05273 -0.07693,-0.175599 -0.263313,-0.438408 -0.613281,-0.53125 -0.201044,-0.05332 -0.376799,-0.0307 -0.517578,0.0078 -0.07039,0.01924 -0.128681,0.04787 -0.191406,0.08203 -0.03137,0.01708 -0.06926,0.03382 -0.111328,0.07227 -0.02105,0.0192 -0.06446,0.07891 -0.06641,0.08203 -9.72e-4,0.0016 -0.04249,0.08644 -0.04297,0.08789 -4.78e-4,0.0015 -0.01566,0.123677 -0.01563,0.125 3.5e-5,0.0013 0.01525,0.110221 0.01563,0.111328 3.77e-4,0.0011 0.0657,0.110417 0.06641,0.111328 0.0014,0.0018 0.143808,0.116046 0.146484,0.117188 0.0027,0.0011 0.0723,0.02559 0.0957,0.0293 0.0234,0.0037 0.04672,0.0072 0.06055,0.0078 0.05531,0.0025 0.06953,-3.35e-4 0.08789,0 0.03674,6.71e-4 0.04558,-3.61e-4 0.05859,0.0078 0.114878,0.07228 -0.01299,-0.06995 0.06641,0.111329 0.03659,0.08358 0.157315,0.371742 0.458984,0.544921 0.285797,0.164047 0.557684,0.123427 0.664063,0.111329 0.122234,0.17551 0.227075,0.381926 0.390625,0.451171 0.249607,0.10568 0.48318,0.143002 0.767578,0.25 0.309048,0.116278 0.511479,0.259046 0.804687,0.339844 0.293209,0.0808 0.670046,0.01236 1.003907,-0.236328 0.244935,-0.182387 0.41372,-0.403082 0.464843,-0.65039 0.03446,-0.166685 -0.0068,-0.292449 -0.03711,-0.427735 0.788892,-0.450955 
1.365971,-1.084607 1.580078,-1.580078 0.129867,-0.300526 0.193854,-0.493042 0.28125,-0.626953 0.0874,-0.133911 0.203495,-0.246614 0.501953,-0.419922 0.298507,-0.173333 0.453654,-0.218916 0.613282,-0.228516 0.159628,-0.0096 0.360244,0.03024 0.685546,0.06641 0.697397,0.07753 1.864628,-0.02197 2.820313,-0.982422 0.955619,-0.960381 1.047858,-2.130661 0.966797,-2.828125 -0.03779,-0.325153 -0.07518,-0.525968 -0.06641,-0.685547 0.0088,-0.159579 0.04885,-0.313911 0.220703,-0.613281 0.171874,-0.299407 0.288646,-0.413982 0.421875,-0.501954 0.133228,-0.08797 0.319297,-0.155763 0.61914,-0.287109 0.642596,-0.281476 1.60182,-0.950241 1.949219,-2.259765 0.347411,-1.30958 -0.149729,-2.358792 -0.568359,-2.921876 -0.195287,-0.262669 -0.326211,-0.419796 -0.398438,-0.5625 -0.07223,-0.142702 -0.118282,-0.297402 -0.11914,-0.642578 -8.43e-4,-0.345046 0.03969,-0.497728 0.111328,-0.640625 0.07164,-0.142896 0.204339,-0.304554 0.398437,-0.568359 0.416158,-0.565608 0.914385,-1.624126 0.560547,-2.931641 C 31.069974,10.104522 30.10535,9.4386232 29.460938,9.1601562 29.160411,9.0302905 28.967893,8.9682545 28.833984,8.8808594 28.700076,8.7934642 28.585417,8.6773669 28.412109,8.3789062 28.23879,8.0804241 28.20101,7.9253694 28.191406,7.765625 28.181803,7.6058806 28.213832,7.4053715 28.25,7.0800781 28.327543,6.3826808 28.22993,5.2154139 27.269531,4.2597656 26.309184,3.304186 25.139011,3.2118898 24.441406,3.2929688 24.116184,3.3307703 23.921292,3.3680852 23.761719,3.359375 23.602146,3.3506648 23.441903,3.308541 23.142578,3.1367188 22.843169,2.9648464 22.736409,2.8500263 22.648438,2.7167969 22.560466,2.5835674 22.490719,2.3955447 22.359375,2.0957031 22.077917,1.4532062 21.403348,0.48804519 20.09375,0.140625 19.80783,0.06477419 19.542151,0.04950523 19.28125,0.04492187 19.2527,0.03616177 19.22318,0.03091475 19.19336,0.02929688 c -0.0092,7.604e-5 -0.02011,-1.5289e-4 -0.0293,0 z" - style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.73499995;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:normal;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" /> - <path - transform="scale(0.26458333)" - id="hammer" - d="m 17.177734,5.8535156 c -0.90458,-0.00858 -2.566643,0.023249 -2.576172,0.023437 -0.369531,0 -0.656249,0.3200684 -0.65625,0.6699219 V 7.0722656 H 13.443359 V 6.546875 c 0,-0.3498217 -0.296513,-0.6699219 -0.666015,-0.6699219 h -1.822266 c -0.369543,0 -0.664062,0.3185831 -0.664062,0.6699219 v 3.861328 c 0,0.351137 0.294665,0.671875 0.664062,0.671875 h 1.822266 c 0.369467,0 
0.666015,-0.322067 0.666015,-0.671875 V 9.8847656 h 0.501953 v 0.5234374 c 0,0.349856 0.286741,0.671875 0.65625,0.671875 h 0.40625 v 4.400391 c -0.429509,0.35793 -0.90039,0.728801 -0.90039,1.328125 v 7.449218 c 0,1.069969 0.877293,1.947266 1.947266,1.947266 1.069971,0 1.949218,-0.877137 1.949218,-1.947266 v -7.449218 c 0,-0.600462 -0.469465,-0.970206 -0.90039,-1.328125 v -4.400391 h 0.679687 l 0.140625,-0.132812 c 0.274298,-0.241496 0.262144,-0.358095 0.390625,-0.523438 0.121344,-0.156157 0.30876,-0.331513 0.626953,-0.501953 0.977923,-0.3263244 1.58437,-0.096178 2.015625,0.191406 0.217977,0.145361 0.38435,0.304544 0.501953,0.427735 0.0588,0.06159 0.0867,0.105594 0.169922,0.177734 0.02079,0.01803 0.04559,0.03682 0.0957,0.06641 0.02506,0.01482 0.10911,0.05004 0.111329,0.05078 0.0022,7.36e-4 0.21117,0.02363 0.21289,0.02344 8.58e-4,-8.7e-5 0.163329,-0.03677 0.164063,-0.03711 7.33e-4,-3.22e-4 0.138067,-0.09514 0.138671,-0.0957 0.0012,-0.0011 0.139907,-0.220547 0.140626,-0.222656 7.18e-4,-0.0021 0.02688,-0.09873 0.0293,-0.132812 0.0024,-0.03409 0.0014,-0.05869 0,-0.08008 -0.0126,-0.208885 -0.08797,-0.3220522 -0.11524,-0.421875 C 22.080106,9.10767 22.046876,7.7572994 20.107422,6.7324219 l -0.01367,-0.00781 -0.01563,-0.00781 C 19.632346,6.5163047 19.154576,6.3159935 18.742188,6.1640625 18.535993,6.088097 18.345241,6.0274915 18.181641,5.9785156 18.061144,5.9424434 17.959243,5.9233384 17.849609,5.90625 c -0.0052,-8.054e-4 -0.01037,-0.00706 -0.01563,-0.00781 -0.0243,-0.00677 -0.04332,-0.01279 -0.05859,-0.015625 -0.01822,-0.00334 -0.03075,-0.00412 -0.04492,-0.00586 -0.0096,-0.00119 -0.02019,-0.00691 -0.0293,-0.00781 -0.04519,-0.00461 -0.08879,-0.00551 -0.140625,-0.00781 -0.103688,-0.00461 -0.231577,-0.00638 -0.382813,-0.00781 z" - style="fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1.03000009;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> + }</style> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <rect id="background" y="2.7838e-7" width="8.4667" height="8.4667" ry=".50736" style="paint-order:normal"/> + <path id="snake" transform="scale(.26458)" d="m19.164 0.029297c-0.88194 0.014665-1.5718 0.37353-1.9941 0.6875-0.26272 0.19532-0.41783 0.32613-0.56055 0.39844-0.14272 0.072303-0.29746 0.11633-0.64258 0.11719-0.34503 8.75e-4 -0.49969-0.039683-0.64258-0.11133-0.14289-0.071645-0.30455-0.20433-0.56836-0.39844-0.56551-0.41609-1.6221-0.9144-2.9297-0.56055-1.3076 0.35385-1.9735 1.3204-2.252 1.9648-0.12988 0.30055-0.19383 0.49294-0.28125 0.62695-0.087417 0.13401-0.20354 0.24664-0.50195 0.41992-0.29844 0.1733-0.45159 0.21312-0.61133 0.22266-0.15973 0.0095357-0.36213-0.024369-0.6875-0.060547-0.69753-0.07756-1.8647 0.022023-2.8203 0.98242-0.95556 0.96031-1.0479 2.1227-0.9668 2.8203 0.037788 0.32522 0.075118 0.52585 0.066406 0.68555-0.0087118 0.15969-0.048898 0.31398-0.2207 0.61328-0.17186 0.29938-0.28672 0.41407-0.41992 0.50195-0.1332 0.087889-0.32119 0.15574-0.62109 0.28711-0.64266 0.28151-1.6096 0.95019-1.957 2.2598-0.34742 1.3096 0.15757 2.3666 0.57617 2.9297 0.19532 0.26272 0.32613 0.41978 0.39844 0.5625 0.072304 0.14272 0.11828 0.29551 0.11914 0.64062 8.427e-4 0.34504-0.039708 0.49979-0.11133 0.64258-0.071621 0.14279-0.20432 0.29866-0.39844 0.5625-0.41617 0.56562-0.91438 1.63-0.56055 2.9375 0.35383 1.3075 
1.3183 1.9656 1.9629 2.2441 0.30055 0.12988 0.49294 0.19969 0.62695 0.28711 0.13401 0.08742 0.24859 0.20354 0.42188 0.50195 0.17331 0.29846 0.21117 0.45366 0.2207 0.61328 0.00952 0.15962-0.022405 0.36018-0.058594 0.68555-0.07756 0.69753 0.021947 1.8647 0.98242 2.8203 0.96054 0.95576 2.1216 1.0478 2.8184 0.9668 0.32479-0.03776 0.52777-0.07552 0.6875-0.06641 0.15973 0.0091 0.31957 0.05069 0.61914 0.22266 0.29938 0.17186 0.40821 0.27891 0.49609 0.41211 0.087889 0.1332 0.15574 0.329 0.28711 0.62891 0.28154 0.6427 0.95026 1.5999 2.2598 1.9473 0.68097 0.18065 1.3929 0.10085 1.9473-0.05078 0.27718-0.07582 0.51383-0.17138 0.70117-0.27344 0.09367-0.05103 0.17632-0.1008 0.25195-0.16992 0.07563-0.06912 0.18398-0.15909 0.17774-0.38281-0.0045-0.15928-0.15659-0.33832-0.26562-0.38477-0.10904-0.04644-0.1808-0.04758-0.25195-0.05078-0.14232-0.0064-0.28263 0.0029-0.44922 0-0.33318-0.0058-0.73652-0.05383-1.1445-0.31055-0.90522-0.56956-0.72117-0.72807-0.97461-1.3066-0.21057-0.48069-0.64521-1.521-1.8164-2.1934-1.1713-0.67236-2.2838-0.52538-2.8047-0.46484-0.28801 0.03348-0.39727 0.05848-0.42188 0.05273-0.024608-0.0057-0.12892-0.05795-0.39062-0.31836-0.26164-0.26035-0.31078-0.35895-0.31641-0.38281-0.00563-0.02385 0.010884-0.14107 0.042969-0.42969 0.058004-0.52165 0.20538-1.6292-0.47266-2.7969-0.67803-1.1677-1.7171-1.6003-2.1992-1.8086-0.26649-0.11516-0.37278-0.15326-0.39062-0.16992s-0.081293-0.10847-0.17773-0.46484c-0.096489-0.35655-0.08929-0.46948-0.082031-0.49414 0.00726-0.02464 0.080382-0.11448 0.25195-0.34766 0.31051-0.42202 0.99165-1.3134 0.98828-2.6641-0.00329-1.3506-0.69092-2.2372-1.0039-2.6582-0.17301-0.23272-0.24275-0.32375-0.25-0.34766-0.00725-0.02393-0.014608-0.13724 0.080078-0.49414 0.094621-0.35668 0.15194-0.44794 0.16992-0.46484 0.017979-0.01689 0.12654-0.05338 0.39258-0.16992 0.48071-0.21057 1.5191-0.64521 2.1914-1.8164 0.67236-1.1713 0.52538-2.2838 0.46484-2.8047-0.033451-0.28794-0.05655-0.39526-0.050781-0.41992 0.0057827-0.02468 0.055978-0.13085 0.31641-0.39258 0.26055-0.26185 0.36074-0.31065 0.38477-0.31641 0.024038-0.00574 0.13179 0.012875 0.41992 0.044922 0.52063 0.057887 1.6386 0.19776 2.8066-0.48047 1.1678-0.67812 1.5928-1.712 1.8008-2.1934 0.11497-0.26606 0.16049-0.37226 0.17773-0.39062 0.01723-0.018368 0.10838-0.081269 0.46484-0.17773 0.35656-0.096492 0.46964-0.087296 0.49414-0.080078 0.02449 0.00722 0.10662 0.078405 0.33984 0.25 0.42203 0.31051 1.3135 0.98578 2.6641 0.98242 1.3505-0.00337 2.2454-0.68338 2.666-0.99609 0.23273-0.17302 0.31379-0.24464 0.33789-0.25195 0.02411-0.00731 0.13926-0.012632 0.49609 0.082031 0.35676 0.094642 0.44745 0.15151 0.46484 0.16992 0.01739 0.01841 0.0594 0.12495 0.17578 0.39062 0.21063 0.48084 0.64524 1.5133 1.8164 2.1855 1.1712 0.67231 2.2837 0.5254 2.8047 0.46484 0.28802-0.033487 0.39727-0.050663 0.42188-0.044922 0.0246 0.00574 0.12892 0.057957 0.39062 0.31836 0.2617 0.2604 0.31274 0.35905 0.31836 0.38281 0.0056 0.023773-0.02064 0.13333-0.05273 0.42188-0.05798 0.52147-0.1976 1.637 0.48047 2.8047 0.67812 1.1678 1.7179 1.5928 2.1992 1.8008 0.26609 0.11498 0.37412 0.15261 0.39258 0.16992 0.01844 0.01731 0.07933 0.11624 0.17578 0.47266 0.09648 0.35651 0.08927 0.46962 0.08203 0.49414-0.0073 0.02453-0.08032 0.10657-0.25195 0.33984-0.31051 0.42202-0.99164 1.3135-0.98828 2.6641 0.0034 1.3506 0.69127 2.2455 1.0039 2.666 0.17298 0.23267 0.24271 0.31577 0.25 0.33984 0.0073 0.02407 0.01461 0.13725-0.08008 0.49414-0.09465 0.35678-0.15923 0.44736-0.17773 0.46484-0.01848 0.0175-0.1172 0.06139-0.38281 0.17773-0.48071 0.21057-1.521 0.64522-2.1934 1.8164-0.67234 
1.1713-0.52538 2.2838-0.46484 2.8047 0.03349 0.28802 0.05652 0.39531 0.05078 0.41992-0.0057 0.02461-0.056 0.12307-0.31641 0.38477-0.26038 0.26167-0.36085 0.31075-0.38477 0.31641-0.02392 0.0057-0.13138-0.01088-0.41992-0.04297-0.52166-0.05801-1.637-0.19952-2.8047 0.47852-1.1676 0.67802-1.6002 1.719-1.8086 2.2012-0.09505 0.21997-0.14146 0.345-0.16211 0.375-0.0078 0.01134-0.06163 0.02709-0.07422 0.03711-0.02506-0.01043-0.04775-0.01936-0.07422-0.0293-0.39901-0.15012-0.77353-0.12686-1.0332 0.0293-0.25968 0.15617-0.40837 0.34516-0.6582 0.53125-0.22497 0.16758-0.42334 0.25977-0.62695 0.42188-0.1078 0.08583-0.13134 0.27743-0.19922 0.42774-0.01406 0.0015-0.03043 0.0072-0.0293 0.0078 0.0011-5.19e-4 0.0011-0.0012-0.02148-0.05273-0.07693-0.1756-0.26331-0.43841-0.61328-0.53125-0.20104-0.05332-0.3768-0.0307-0.51758 0.0078-0.07039 0.01924-0.12868 0.04787-0.19141 0.08203-0.03137 0.01708-0.06926 0.03382-0.11133 0.07227-0.02105 0.0192-0.06446 0.07891-0.06641 0.08203-9.72e-4 0.0016-0.04249 0.08644-0.04297 0.08789-4.78e-4 0.0015-0.01566 0.12368-0.01563 0.125 3.5e-5 0.0013 0.01525 0.11022 0.01563 0.11133 3.77e-4 0.0011 0.0657 0.11042 0.06641 0.11133 0.0014 0.0018 0.14381 0.11605 0.14648 0.11719 0.0027 0.0011 0.0723 0.02559 0.0957 0.0293 0.0234 0.0037 0.04672 0.0072 0.06055 0.0078 0.05531 0.0025 0.06953-3.35e-4 0.08789 0 0.03674 6.71e-4 0.04558-3.61e-4 0.05859 0.0078 0.11488 0.07228-0.01299-0.06995 0.06641 0.11133 0.03659 0.08358 0.15732 0.37174 0.45898 0.54492 0.2858 0.16405 0.55768 0.12343 0.66406 0.11133 0.12223 0.17551 0.22708 0.38193 0.39062 0.45117 0.24961 0.10568 0.48318 0.143 0.76758 0.25 0.30905 0.11628 0.51148 0.25905 0.80469 0.33984 0.29321 0.0808 0.67005 0.01236 1.0039-0.23633 0.24494-0.18239 0.41372-0.40308 0.46484-0.65039 0.03446-0.16668-0.0068-0.29245-0.03711-0.42774 0.78889-0.45096 1.366-1.0846 1.5801-1.5801 0.12987-0.30053 0.19385-0.49304 0.28125-0.62695 0.0874-0.13391 0.2035-0.24661 0.50195-0.41992 0.29851-0.17333 0.45365-0.21892 0.61328-0.22852s0.36024 0.03024 0.68555 0.06641c0.6974 0.07753 1.8646-0.02197 2.8203-0.98242 0.95562-0.96038 1.0479-2.1307 0.9668-2.8281-0.03779-0.32515-0.07518-0.52597-0.06641-0.68555 0.0088-0.15958 0.04885-0.31391 0.2207-0.61328 0.17187-0.29941 0.28865-0.41398 0.42188-0.50195 0.13323-0.08797 0.3193-0.15576 0.61914-0.28711 0.6426-0.28148 1.6018-0.95024 1.9492-2.2598 0.34741-1.3096-0.14973-2.3588-0.56836-2.9219-0.19529-0.26267-0.32621-0.4198-0.39844-0.5625-0.07223-0.1427-0.11828-0.2974-0.11914-0.64258-8.43e-4 -0.34505 0.03969-0.49773 0.11133-0.64062 0.07164-0.1429 0.20434-0.30455 0.39844-0.56836 0.41616-0.56561 0.91438-1.6241 0.56055-2.9316-0.35385-1.3076-1.3185-1.9734-1.9629-2.2519-0.30053-0.12987-0.49304-0.1919-0.62695-0.2793-0.13391-0.087395-0.24857-0.20349-0.42188-0.50195-0.17332-0.29848-0.2111-0.45354-0.2207-0.61328-0.009603-0.15974 0.022426-0.36025 0.058594-0.68555 0.077543-0.6974-0.02007-1.8647-0.98047-2.8203-0.96035-0.95558-2.1305-1.0479-2.8281-0.9668-0.32522 0.037802-0.52011 0.075116-0.67969 0.066406s-0.31982-0.050834-0.61914-0.22266c-0.29941-0.17187-0.40617-0.28669-0.49414-0.41992-0.087972-0.13323-0.15772-0.32125-0.28906-0.62109-0.28146-0.6425-0.95603-1.6077-2.2656-1.9551-0.28592-0.075851-0.5516-0.09112-0.8125-0.095703-0.02855-0.0087601-0.05807-0.014007-0.08789-0.015625-0.0092 7.604e-5 -0.02011-1.5289e-4 -0.0293 0z" color="#000000" color-rendering="auto" dominant-baseline="auto" image-rendering="auto" shape-rendering="auto" solid-color="#000000" 
style="font-feature-settings:normal;font-variant-alternates:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal;font-variant-position:normal;isolation:auto;mix-blend-mode:normal;paint-order:normal;shape-padding:0;text-decoration-color:#000000;text-decoration-line:none;text-decoration-style:solid;text-indent:0;text-orientation:mixed;text-transform:none;white-space:normal"/> + <path id="hammer" transform="scale(.26458)" d="m17.178 5.8535c-0.90458-0.00858-2.5666 0.023249-2.5762 0.023437-0.36953 0-0.65625 0.32007-0.65625 0.66992v0.52539h-0.50195v-0.52539c0-0.34982-0.29651-0.66992-0.66602-0.66992h-1.8223c-0.36954 0-0.66406 0.31858-0.66406 0.66992v3.8613c0 0.35114 0.29466 0.67188 0.66406 0.67188h1.8223c0.36947 0 0.66602-0.32207 0.66602-0.67188v-0.52344h0.50195v0.52344c0 0.34986 0.28674 0.67188 0.65625 0.67188h0.40625v4.4004c-0.42951 0.35793-0.90039 0.7288-0.90039 1.3281v7.4492c0 1.07 0.87729 1.9473 1.9473 1.9473 1.07 0 1.9492-0.87714 1.9492-1.9473v-7.4492c0-0.60046-0.46946-0.97021-0.90039-1.3281v-4.4004h0.67969l0.14062-0.13281c0.2743-0.2415 0.26214-0.3581 0.39062-0.52344 0.12134-0.15616 0.30876-0.33151 0.62695-0.50195 0.97792-0.32632 1.5844-0.096178 2.0156 0.19141 0.21798 0.14536 0.38435 0.30454 0.50195 0.42774 0.0588 0.06159 0.0867 0.10559 0.16992 0.17773 0.02079 0.01803 0.04559 0.03682 0.0957 0.06641 0.02506 0.01482 0.10911 0.05004 0.11133 0.05078 0.0022 7.36e-4 0.21117 0.02363 0.21289 0.02344 8.58e-4 -8.7e-5 0.16333-0.03677 0.16406-0.03711 7.33e-4 -3.22e-4 0.13807-0.09514 0.13867-0.0957 0.0012-0.0011 0.13991-0.22055 0.14063-0.22266 7.18e-4 -0.0021 0.02688-0.09873 0.0293-0.13281 0.0024-0.03409 0.0014-0.05869 0-0.08008-0.0126-0.20888-0.08797-0.32205-0.11524-0.42188-0.32614-0.76148-0.35937-2.1118-2.2988-3.1367l-0.01367-0.00781-0.01563-0.00781c-0.44578-0.2005-0.92355-0.40081-1.3359-0.55274-0.2062-0.075966-0.39695-0.13657-0.56055-0.18555-0.1205-0.036072-0.2224-0.055177-0.33203-0.072266-0.0052-8.054e-4 -0.01037-0.00706-0.01563-0.00781-0.0243-0.00677-0.04332-0.01279-0.05859-0.015625-0.01822-0.00334-0.03075-0.00412-0.04492-0.00586-0.0096-0.00119-0.02019-0.00691-0.0293-0.00781-0.04519-0.00461-0.08879-0.00551-0.14062-0.00781-0.10369-0.00461-0.23158-0.00638-0.38281-0.00781z" style="paint-order:markers fill stroke"/> </svg> diff --git a/docs/images/logo-demo.svg b/docs/images/logo-demo.svg index 279b9088..6b78ebc3 100644 --- a/docs/images/logo-demo.svg +++ b/docs/images/logo-demo.svg @@ -1,543 +1,150 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 242.69724 125.92096" - height="125.92096mm" - width="242.69724mm"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="translate(113.7608,-68.445953)" - id="layer1"> - <path - id="rect1633" - d="m -111.68712,133.31531 h 61.236975 c 0.871819,0 1.57368,0.70186 1.57368,1.57368 v 57.40424 c 0,0.87182 -0.701861,1.57368 -1.57368,1.57368 h -61.236975 c -0.87182,0 -1.57368,-0.70186 -1.57368,-1.57368 v -57.40424 c 0,-0.87182 0.70186,-1.57368 1.57368,-1.57368 z" - 
style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="rect1616" - d="m -111.68712,68.945953 h 61.236975 c 0.871819,0 1.57368,0.701862 1.57368,1.57368 v 57.404237 c 0,0.87182 -0.701861,1.57368 -1.57368,1.57368 h -61.236975 c -0.87182,0 -1.57368,-0.70186 -1.57368,-1.57368 V 70.519633 c 0,-0.871818 0.70186,-1.57368 1.57368,-1.57368 z" - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="rect934" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 129.76562 43.6875 C 125.05835 43.642806 116.15234 43.820312 116.15234 43.820312 C 115.7204 43.820312 115.37305 44.184688 115.37305 44.634766 L 115.37305 50.160156 L 107.26172 50.160156 L 107.26172 44.634766 C 107.26172 44.184643 106.91437 43.820312 106.48242 43.820312 L 96.810547 43.820312 C 96.3786 43.820312 96.03125 44.184688 96.03125 44.634766 L 96.03125 65.115234 C 96.03125 65.565357 96.37832 65.927734 96.810547 65.927734 L 106.48242 65.927734 C 106.91437 65.927734 107.26172 65.56541 107.26172 65.115234 L 107.26172 59.589844 L 115.37305 59.589844 L 115.37305 65.115234 C 115.37305 65.565357 115.72044 65.927734 116.15234 65.927734 L 121.04688 65.927734 L 121.04688 91.988281 C 118.24046 93.102134 116.26367 95.830599 116.26367 99.042969 L 116.26367 138.53711 C 116.26367 142.74482 119.65165 146.13281 123.85938 146.13281 C 128.0671 146.13281 131.45508 142.74482 131.45508 138.53711 L 131.45508 99.042969 C 131.45508 95.83111 129.47758 93.102472 126.67188 91.988281 L 126.67188 65.927734 L 131.97266 65.927734 C 132.817 65.184366 133.63076 62.20152 138.14062 59.921875 C 150.25133 55.761613 155.51023 66.069322 155.41797 64.646484 C 155.31932 63.127439 154.64637 53.599455 144.07227 48.011719 C 139.46542 45.940152 134.04892 43.904955 132.58789 43.820312 C 132.43711 43.740857 131.33472 43.702398 129.76562 43.6875 z " - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:1.80452192;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path944" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 140.35352 12.013672 C 136.19864 12.082752 132.90918 13.803819 130.91406 15.287109 C 128.25391 17.264827 127.40072 18.396257 123.37695 18.40625 C 119.35319 18.416455 118.50002 17.284786 115.83008 15.320312 C 113.16014 13.355843 108.18531 10.9882 101.94922 12.675781 C 95.713131 14.363363 92.611765 18.918093 91.296875 21.960938 C 89.981989 25.00376 89.809761 26.409141 86.330078 28.429688 C 82.850392 30.450238 81.54452 29.905352 78.25 29.539062 C 74.955533 29.172751 69.457178 29.604069 64.900391 34.183594 C 60.343547 38.763115 59.935709 44.26403 60.318359 47.556641 C 60.700953 50.849271 61.253176 52.152884 59.25 55.642578 C 57.246767 59.132269 55.846811 59.308664 52.810547 60.638672 C 49.774267 61.968699 45.230748 65.087643 43.574219 71.332031 C 41.917652 77.576416 44.319275 82.539066 46.296875 85.199219 C 48.274588 87.859375 49.404047 88.712556 49.414062 92.736328 C 49.425401 96.760127 48.294525 97.613269 46.330078 100.2832 C 44.365593 102.95317 41.99795 107.92796 43.685547 
114.16406 C 45.373106 120.40008 49.927824 123.50151 52.970703 124.81641 C 56.013514 126.1313 57.41888 126.30354 59.439453 129.7832 C 61.459973 133.2629 60.915151 134.56878 60.548828 137.86328 C 60.182516 141.15774 60.611602 146.6561 65.191406 151.21289 C 69.770935 155.76968 75.271827 156.16783 78.564453 155.78516 C 81.857079 155.40244 83.1607 154.86006 86.650391 156.86328 C 90.140081 158.86651 90.31841 160.26645 91.648438 163.30273 C 92.978465 166.33898 96.095482 170.8825 102.33984 172.53906 C 108.58421 174.19559 116.2409 171.03081 116.20703 169.81641 C 116.17264 168.60201 111.04679 170.69144 105.90625 167.45703 C 100.7657 164.22262 101.13333 162.22587 100 159.63867 C 98.866667 157.05143 96.839351 152.19384 91.183594 148.94727 C 85.527897 145.70065 80.31543 146.40437 77.509766 146.73047 C 74.704098 147.05664 74.516594 147.62332 71.621094 144.74219 C 68.725598 141.86102 69.301168 141.67834 69.613281 138.87109 C 69.92547 136.06381 70.596973 130.85045 67.322266 125.21094 C 64.047558 119.57143 59.184609 117.56376 56.591797 116.44336 C 53.99898 115.32292 53.555302 115.73194 52.488281 111.78906 C 51.421249 107.84611 52.001783 107.96067 53.675781 105.68555 C 55.349711 103.41046 58.545541 99.236116 58.529297 92.714844 C 58.513423 86.193511 55.296551 82.022582 53.611328 79.755859 C 51.926112 77.489118 51.341225 77.614182 52.388672 73.666016 C 53.436043 69.717853 53.887394 70.125498 56.474609 68.992188 C 59.061828 67.858877 63.919383 65.829566 67.166016 60.173828 C 70.412588 54.51809 69.70891 49.305672 69.382812 46.5 C 69.056904 43.694328 68.487984 43.506847 71.369141 40.611328 C 74.250293 37.715809 74.432971 38.281625 77.240234 38.59375 C 80.047441 38.905863 85.262831 39.589161 90.902344 36.314453 C 96.541856 33.039765 98.537762 28.176808 99.658203 25.583984 C 100.77865 22.991164 100.37935 22.547482 104.32227 21.480469 C 108.26519 20.413436 108.15068 20.992108 110.42578 22.666016 C 112.70089 24.339942 116.87716 27.537717 123.39844 27.521484 C 129.91975 27.505232 134.07897 24.286786 136.3457 22.601562 C 138.61243 20.91632 138.48738 20.331501 142.43555 21.378906 C 146.38371 22.426289 145.98778 22.877632 147.12109 25.464844 C 148.25441 28.052059 150.28177 32.90964 155.9375 36.15625 C 161.59323 39.40286 166.80762 38.699148 169.61328 38.373047 C 172.41895 38.046949 172.60643 37.480175 175.50195 40.361328 C 178.39747 43.242481 177.81994 43.425163 177.50781 46.232422 C 177.1957 49.039655 176.52412 54.253073 179.79883 59.892578 C 183.07353 65.532083 187.93648 67.528031 190.5293 68.648438 C 193.12212 69.768886 193.5658 69.371552 194.63281 73.314453 C 195.69983 77.257355 195.11923 77.142878 193.44531 79.417969 C 191.77137 81.69304 188.57558 85.867362 188.5918 92.388672 C 188.60805 98.909982 191.82454 103.0692 193.50977 105.33594 C 195.195 107.60267 195.77981 107.47956 194.73242 111.42773 C 193.68503 115.37594 193.23565 114.978 190.64844 116.11133 C 188.06122 117.24462 183.20364 119.27396 179.95703 124.92969 C 176.71043 130.58541 177.41219 135.79782 177.73828 138.60352 C 178.06438 141.40917 178.63311 141.59669 175.75195 144.49219 C 172.8708 147.38772 172.68811 146.81215 169.88086 146.5 C 167.07361 146.18781 161.86022 145.5144 156.2207 148.78906 C 150.58119 152.0638 148.57355 156.9267 147.45312 159.51953 C 146.52183 161.67468 146.61138 162.34736 144.41602 163.125 C 144.10316 162.9514 143.74177 162.78529 143.31055 162.62305 C 139.67411 161.2549 139.19558 163.10499 136.21094 165.32812 C 133.52697 167.3273 131.60788 167.56213 131.7207 170.19922 C 131.46663 170.17359 131.2285 170.16916 131.03711 170.19141 C 
130.42775 170.26208 130.18685 170.36296 129.54102 169.99219 C 128.89519 169.62142 128.86138 169.36077 128.61523 168.79883 C 128.36907 168.23696 127.79232 167.3964 126.63672 167.08984 C 125.48104 166.78332 124.06401 167.36894 124.07031 167.59375 C 124.07674 167.81825 125.02523 167.43246 125.97656 168.03125 C 126.9279 168.62985 126.86058 168.99777 127.07031 169.47656 C 127.27996 169.95543 127.65451 170.8562 128.70117 171.45703 C 129.74786 172.05786 130.71318 171.92758 131.23242 171.86719 C 131.62968 171.82092 131.74614 171.75356 132.01758 171.96094 C 132.81453 174.68982 134.74217 174.38918 138.08398 175.64648 C 141.72042 177.01463 142.83177 178.66064 145.81641 176.4375 C 148.00372 174.80826 147.78373 173.63201 147.25586 171.71094 C 152.11523 169.67879 154.66507 165.82501 155.82422 163.14258 C 157.13911 160.09976 157.31134 158.69436 160.79102 156.67383 C 164.2707 154.65325 165.57856 155.19814 168.87305 155.56445 C 172.16753 155.93069 177.6639 155.49961 182.2207 150.91992 C 186.7775 146.34042 187.18737 140.83949 186.80469 137.54688 C 186.42198 134.25426 185.86788 132.95065 187.87109 129.46094 C 189.87431 125.97126 191.27429 125.7949 194.31055 124.46484 C 197.3468 123.13483 201.89229 120.01585 203.54883 113.77148 C 205.20537 107.5271 202.80389 102.56444 200.82617 99.904297 C 198.84845 97.244152 197.71704 96.390986 197.70703 92.367188 C 197.6972 88.343434 198.82655 87.490261 200.79102 84.820312 C 202.75549 82.150364 205.12509 77.17553 203.4375 70.939453 C 201.74991 64.703395 197.19517 61.601999 194.15234 60.287109 C 191.10951 58.972219 189.70219 58.801941 187.68164 55.322266 C 185.66109 51.842591 186.20792 50.534716 186.57422 47.240234 C 186.94053 43.945737 186.50922 38.447409 181.92969 33.890625 C 177.35017 29.333819 171.84926 28.925905 168.55664 29.308594 C 165.26403 29.691301 163.96039 30.245405 160.4707 28.242188 C 156.98101 26.23897 156.80463 24.838995 155.47461 21.802734 C 154.14459 18.766477 151.02563 14.221009 144.78125 12.564453 C 143.22015 12.15032 141.73847 11.990645 140.35352 12.013672 z " - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75552428;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1513" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m -108.10921,126.81962 q -0.95915,0 -2.16394,-0.18715 v -1.42704 q 1.40949,0.21055 2.16394,0.21055 1.04104,0 1.04104,-1.11122 0,-0.30997 -0.15206,-0.58485 -0.14622,-0.28073 -0.41525,-0.386 -0.1696,-0.0643 -0.46788,-0.10527 -0.29827,-0.0409 -0.61409,-0.11697 -0.30997,-0.0819 -0.59655,-0.25149 -1.09952,-0.65503 -1.09952,-2.07037 0,-1.01179 0.64334,-1.70192 0.64918,-0.69012 1.66097,-0.69012 0.78955,0 2.16395,0.14621 v 1.40949 q -1.42119,-0.15206 -2.16395,-0.15206 -0.22224,0 -0.39185,0.0994 -0.16376,0.0994 -0.25148,0.25733 -0.0877,0.15791 -0.12867,0.32167 -0.0351,0.16376 -0.0351,0.32167 0,0.69012 
0.46788,0.90067 0.15206,0.0702 0.69012,0.15791 0.54391,0.0819 0.87728,0.25733 0.46788,0.23979 0.772,0.61994 0.30413,0.38015 0.4211,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.70183,1.82474 -0.70182,0.69012 -1.83643,0.69012 z" /> - <path - id="path1515" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m -104.25504,118.49134 h 4.491653 v 1.40365 h -2.994433 v 1.98849 h 2.24582 v 1.40364 h -2.24582 v 2.03528 h 2.994433 v 1.40364 h -4.491653 z" /> - <path - id="path1517" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m -93.406059,118.49134 v 1.40365 h -1.871522 v 6.83105 h -1.497218 v -6.83105 h -1.871523 v -1.40365 z" /> - <path - id="path1519" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m -92.663298,118.49134 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292426,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.19885,0.82464 -0.888974,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.64343,-0.52052 -0.684276,-0.52051 -0.883125,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1521" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m -84.808753,119.89499 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631639,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44448 -0.239789,-0.75446 -0.233941,-0.30997 -0.631639,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.86558,0 1.234036,0.0877 0.830488,0.19885 1.351005,0.87143 0.526366,0.66673 0.526366,1.63758 0,0.97085 -0.526366,1.64343 -0.520517,0.66673 -1.351005,0.86558 -0.368456,0.0877 -1.234036,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1524" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m -75.456982,118.49134 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1526" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m -73.222853,121.09978 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04112 0.23394,-0.94746 0.883125,-1.55571 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649184,0.60825 0.883125,1.55571 0.163758,0.64918 0.163758,2.04112 0,1.39195 -0.163758,2.04113 -0.233941,0.94746 -0.883125,1.56156 -0.649184,0.60824 -1.666825,0.60824 -1.01764,0 -1.666824,-0.60824 -0.649185,-0.6141 -0.883125,-1.56156 z" /> - <path - id="path1528" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m -67.239829,121.09978 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04112 0.233941,-0.94746 0.883125,-1.55571 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.55571 0.163758,0.64918 0.163758,2.04112 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56156 -0.649184,0.60824 -1.666824,0.60824 -1.017641,0 -1.666825,-0.60824 -0.649184,-0.6141 -0.883125,-1.56156 z" /> - <path - id="path1530" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m -62.373871,126.72604 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1532" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m -54.261991,126.81962 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222244,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25733 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16376 -0.03509,0.32167 0,0.69012 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25733 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - <path - id="rect905" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 129.76562 286.97266 C 125.05835 286.92796 116.15234 287.10742 116.15234 287.10742 C 115.7204 287.10742 115.37305 287.46978 115.37305 287.91992 L 115.37305 293.44727 L 107.26172 293.44727 L 107.26172 287.91992 C 107.26172 287.46982 106.91437 287.10742 106.48242 287.10742 L 96.810547 287.10742 C 96.3786 287.10742 96.03125 287.46978 96.03125 287.91992 L 96.03125 308.40234 C 96.03125 308.85245 96.37832 309.21289 96.810547 309.21289 L 106.48242 309.21289 C 106.91437 309.21289 107.26172 308.85237 107.26172 308.40234 L 107.26172 302.875 L 115.37305 302.875 L 115.37305 308.40234 C 115.37305 308.85245 115.72044 309.21289 116.15234 309.21289 L 121.04688 309.21289 L 121.04688 335.27344 C 118.24046 336.38729 116.26367 339.11574 116.26367 342.32812 L 116.26367 381.82422 C 116.26367 386.03197 119.65165 389.41797 123.85938 389.41797 C 128.0671 389.41797 131.45508 386.03197 131.45508 381.82422 L 131.45508 342.32812 C 131.45508 339.11625 129.47758 336.38763 126.67188 335.27344 L 126.67188 309.21289 L 131.97266 309.21289 C 132.817 308.46953 133.63076 305.48864 138.14062 303.20898 C 150.25133 299.04875 155.51023 309.35448 155.41797 307.93164 C 155.31932 306.41261 154.64637 296.88656 144.07227 291.29883 C 139.46542 289.22727 134.04892 287.19205 132.58789 287.10742 C 132.43711 287.02796 131.33472 286.98756 129.76562 286.97266 z " - style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.80452192;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path913" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 140.35352 255.29883 C 136.19864 255.36791 132.90918 257.08898 130.91406 258.57227 C 128.25391 260.54998 127.40072 261.68139 123.37695 261.69141 C 119.35319 261.70274 118.50002 260.57187 115.83008 258.60742 C 113.16014 256.64294 108.18531 254.27529 101.94922 255.96289 C 95.713131 257.65045 92.611765 262.20327 91.296875 265.24609 C 89.981989 268.28892 89.809761 269.69626 86.330078 271.7168 C 82.850392 273.73737 81.54452 273.19049 78.25 272.82422 C 74.955533 272.45798 69.457178 272.88906 
64.900391 277.46875 C 60.343547 282.04825 59.935709 287.54919 60.318359 290.8418 C 60.700953 294.13445 61.253176 295.43802 59.25 298.92773 C 57.246767 302.41741 55.846811 302.59381 52.810547 303.92383 C 49.774267 305.25384 45.230748 308.37277 43.574219 314.61719 C 41.917652 320.86157 44.319275 325.82423 46.296875 328.48438 C 48.274588 331.14452 49.404047 331.99772 49.414062 336.02148 C 49.425401 340.04528 48.294525 340.90042 46.330078 343.57031 C 44.365593 346.24028 41.99795 351.21311 43.685547 357.44922 C 45.373106 363.68529 49.927824 366.78663 52.970703 368.10156 C 56.013514 369.41642 57.41888 369.5887 59.439453 373.06836 C 61.459973 376.54806 60.915151 377.85589 60.548828 381.15039 C 60.182516 384.44489 60.611602 389.94122 65.191406 394.49805 C 69.770935 399.05483 75.271827 399.45295 78.564453 399.07031 C 81.857079 398.6876 83.1607 398.14517 86.650391 400.14844 C 90.140081 402.15166 90.31841 403.55164 91.648438 406.58789 C 92.978465 409.62417 96.095482 414.16761 102.33984 415.82422 C 108.58421 417.48075 116.2409 414.31792 116.20703 413.10352 C 116.17264 411.88912 111.04679 413.97656 105.90625 410.74219 C 100.7657 407.50778 101.13333 405.51103 100 402.92383 C 98.866667 400.33659 96.839351 395.479 91.183594 392.23242 C 85.527897 388.98581 80.31543 389.68953 77.509766 390.01562 C 74.704098 390.3418 74.516594 390.91043 71.621094 388.0293 C 68.725598 385.14813 69.301168 384.96545 69.613281 382.1582 C 69.92547 379.35092 70.596973 374.1356 67.322266 368.49609 C 64.047558 362.85659 59.184609 360.85087 56.591797 359.73047 C 53.99898 358.61003 53.555302 359.01905 52.488281 355.07617 C 51.421249 351.13322 52.001783 351.24583 53.675781 348.9707 C 55.349711 346.69562 58.545541 342.52127 58.529297 336 C 58.513423 329.47865 55.296551 325.3097 53.611328 323.04297 C 51.926112 320.77623 51.341225 320.89934 52.388672 316.95117 C 53.436043 313.003 53.887394 313.41064 56.474609 312.27734 C 59.061828 311.14401 63.919383 309.1167 67.166016 303.46094 C 70.412588 297.80521 69.70891 292.59081 69.382812 289.78516 C 69.056904 286.97946 68.487984 286.79202 71.369141 283.89648 C 74.250293 281.00095 74.432971 281.56679 77.240234 281.87891 C 80.047441 282.1911 85.262831 282.87431 90.902344 279.59961 C 96.541856 276.32491 98.537762 271.46193 99.658203 268.86914 C 100.77865 266.27631 100.37935 265.83262 104.32227 264.76562 C 108.26519 263.69859 108.15068 264.27921 110.42578 265.95312 C 112.70089 267.62704 116.87716 270.82285 123.39844 270.80664 C 129.91975 270.79152 134.07897 267.57389 136.3457 265.88867 C 138.61243 264.20346 138.48738 263.6186 142.43555 264.66602 C 146.38371 265.7134 145.98778 266.16276 147.12109 268.75 C 148.25441 271.3372 150.28177 276.19479 155.9375 279.44141 C 161.59323 282.68802 166.80762 281.98625 169.61328 281.66016 C 172.41895 281.33398 172.60643 280.76531 175.50195 283.64648 C 178.39747 286.52762 177.81994 286.71033 177.50781 289.51758 C 177.1957 292.32482 176.52412 297.53823 179.79883 303.17773 C 183.07353 308.81724 187.93648 310.81319 190.5293 311.93359 C 193.12212 313.05407 193.5658 312.65669 194.63281 316.59961 C 195.69983 320.54249 195.11923 320.42804 193.44531 322.70312 C 191.77137 324.97821 188.57558 329.15252 188.5918 335.67383 C 188.60805 342.19514 191.82454 346.35631 193.50977 348.62305 C 195.195 350.88978 195.77981 350.76472 194.73242 354.71289 C 193.68503 358.6611 193.23565 358.26511 190.64844 359.39844 C 188.06122 360.53173 183.20364 362.55912 179.95703 368.21484 C 176.71043 373.87057 177.41219 379.08298 177.73828 381.88867 C 178.06438 384.69433 178.63311 384.88185 175.75195 
387.77734 C 172.8708 390.67284 172.68811 390.09731 169.88086 389.78516 C 167.07361 389.47297 161.86022 388.80151 156.2207 392.07617 C 150.58119 395.35091 148.57355 400.21381 147.45312 402.80664 C 146.52186 404.96171 146.61114 405.63255 144.41602 406.41016 C 144.1033 406.23668 143.74147 406.07033 143.31055 405.9082 C 139.67411 404.54006 139.19558 406.39014 136.21094 408.61328 C 133.52632 410.61294 131.60686 410.84743 131.7207 413.48633 C 131.4666 413.46069 131.22852 413.45431 131.03711 413.47656 C 130.42775 413.54724 130.18685 413.64812 129.54102 413.27734 C 128.89519 412.90657 128.86138 412.64788 128.61523 412.08594 C 128.36907 411.52407 127.79232 410.68156 126.63672 410.375 C 125.48104 410.06848 124.06401 410.6541 124.07031 410.87891 C 124.07674 411.10341 125.02523 410.71762 125.97656 411.31641 C 126.9279 411.91501 126.86058 412.28488 127.07031 412.76367 C 127.27996 413.24254 127.65451 414.14136 128.70117 414.74219 C 129.74786 415.34302 130.71318 415.21274 131.23242 415.15234 C 131.62968 415.10608 131.74614 415.03872 132.01758 415.24609 C 132.81453 417.97498 134.74217 417.67434 138.08398 418.93164 C 141.72042 420.29979 142.83177 421.94579 145.81641 419.72266 C 148.00363 418.09349 147.78368 416.91896 147.25586 414.99805 C 152.11528 412.96591 154.66506 409.11213 155.82422 406.42969 C 157.13911 403.38687 157.31134 401.97948 160.79102 399.95898 C 164.2707 397.93837 165.57856 398.4833 168.87305 398.84961 C 172.16753 399.21585 177.6639 398.78668 182.2207 394.20703 C 186.7775 389.62753 187.18737 384.12464 186.80469 380.83203 C 186.42198 377.53942 185.86788 376.2378 187.87109 372.74805 C 189.87431 369.25841 191.27429 369.08002 194.31055 367.75 C 197.3468 366.41998 201.89229 363.30298 203.54883 357.05859 C 205.20537 350.81425 202.80389 345.85155 200.82617 343.19141 C 198.84845 340.53126 197.71704 339.6781 197.70703 335.6543 C 197.6972 331.63057 198.82655 330.77544 200.79102 328.10547 C 202.75549 325.43553 205.12509 320.46072 203.4375 314.22461 C 201.74991 307.98858 197.19517 304.88908 194.15234 303.57422 C 191.10951 302.25932 189.70219 302.08708 187.68164 298.60742 C 185.66109 295.12772 186.20792 293.81985 186.57422 290.52539 C 186.94053 287.23089 186.50922 281.73452 181.92969 277.17773 C 177.35017 272.62091 171.84926 272.21107 168.55664 272.59375 C 165.26403 272.97646 163.96039 273.53053 160.4707 271.52734 C 156.98101 269.52412 156.80463 268.12414 155.47461 265.08789 C 154.14459 262.05164 151.02563 257.50809 144.78125 255.85156 C 143.22015 255.43743 141.73847 255.2758 140.35352 255.29883 z " - style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75552428;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="text925" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1488" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -108.10921,191.18897 q -0.95915,0 -2.16394,-0.18715 v -1.42703 q 1.40949,0.21054 2.16394,0.21054 
1.04104,0 1.04104,-1.11121 0,-0.30998 -0.15206,-0.58486 -0.14622,-0.28072 -0.41525,-0.386 -0.1696,-0.0643 -0.46788,-0.10527 -0.29827,-0.0409 -0.61409,-0.11697 -0.30997,-0.0819 -0.59655,-0.25149 -1.09952,-0.65503 -1.09952,-2.07037 0,-1.01179 0.64334,-1.70191 0.64918,-0.69013 1.66097,-0.69013 0.78955,0 2.16395,0.14622 v 1.40949 q -1.42119,-0.15207 -2.16395,-0.15207 -0.22224,0 -0.39185,0.0994 -0.16376,0.0994 -0.25148,0.25733 -0.0877,0.15791 -0.12867,0.32167 -0.0351,0.16376 -0.0351,0.32167 0,0.69012 0.46788,0.90067 0.15206,0.0702 0.69012,0.15791 0.54391,0.0819 0.87728,0.25733 0.46788,0.23979 0.772,0.61995 0.30413,0.38015 0.4211,0.76615 0.11697,0.386 0.11697,0.80125 0,1.13461 -0.70183,1.82473 -0.70182,0.69012 -1.83643,0.69012 z" /> - <path - id="path1490" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -104.25504,182.8607 h 4.491653 v 1.40364 h -2.994433 v 1.98849 h 2.24582 v 1.40365 h -2.24582 v 2.03528 h 2.994433 v 1.40364 h -4.491653 z" /> - <path - id="path1492" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -93.406059,182.8607 v 1.40364 h -1.871522 v 6.83106 h -1.497218 v -6.83106 h -1.871523 v -1.40364 z" /> - <path - id="path1494" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -92.663298,182.8607 h 1.497218 v 5.24026 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67843 0.292425,0.25733 0.707669,0.25733 0.415244,0 0.701821,-0.25733 0.292426,-0.25734 0.380153,-0.67843 0.04094,-0.19885 0.04094,-0.74861 v -5.24026 h 1.497218 v 5.24026 q 0,0.82464 -0.09358,1.22234 -0.19885,0.82464 -0.888974,1.34516 -0.684275,0.52051 -1.637582,0.52051 -0.953307,0 -1.64343,-0.52051 -0.684276,-0.52052 -0.883125,-1.34516 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1496" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -84.808753,184.26434 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631639,-0.39185 0.239789,-0.30997 0.239789,-0.75445 0,-0.44449 -0.239789,-0.75446 -0.233941,-0.30997 -0.631639,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40364 h 1.848128 q 0.86558,0 1.234036,0.0877 0.830488,0.19885 1.351005,0.87142 0.526366,0.66673 0.526366,1.63759 0,0.97085 -0.526366,1.64343 
-0.520517,0.66673 -1.351005,0.86558 -0.368456,0.0877 -1.234036,0.0877 h -0.35091 v 3.04123 h -1.497218 z" /> - </g> - <g - id="text929" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1499" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -75.456982,182.8607 v 1.40364 h -1.871523 v 6.83106 h -1.497218 v -6.83106 h -1.871522 v -1.40364 z" /> - <path - id="path1501" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -73.222853,185.46913 q -0.08773,0.47958 -0.08773,1.50892 0,1.02934 0.08773,1.50891 0.105274,0.56731 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36845 0.39185,-0.93576 0.08773,-0.48542 0.08773,-1.50891 0,-1.02934 -0.08773,-1.50892 -0.105273,-0.5673 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.36261 -0.386002,0.92991 z m -1.421187,3.55005 q -0.163758,-0.64919 -0.163758,-2.04113 0,-1.39195 0.163758,-2.04113 0.23394,-0.94746 0.883125,-1.5557 0.649184,-0.6141 1.666824,-0.6141 1.017641,0 1.666825,0.6141 0.649184,0.60824 0.883125,1.5557 0.163758,0.64918 0.163758,2.04113 0,1.39194 -0.163758,2.04113 -0.233941,0.94746 -0.883125,1.56155 -0.649184,0.60824 -1.666825,0.60824 -1.01764,0 -1.666824,-0.60824 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1503" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -67.239829,185.46913 q -0.08773,0.47958 -0.08773,1.50892 0,1.02934 0.08773,1.50891 0.105273,0.56731 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36845 0.39185,-0.93576 0.08773,-0.48542 0.08773,-1.50891 0,-1.02934 -0.08773,-1.50892 -0.105274,-0.5673 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.36261 -0.386001,0.92991 z m -1.421188,3.55005 q -0.163758,-0.64919 -0.163758,-2.04113 0,-1.39195 0.163758,-2.04113 0.233941,-0.94746 0.883125,-1.5557 0.649184,-0.6141 1.666825,-0.6141 1.01764,0 1.666824,0.6141 0.649185,0.60824 0.883125,1.5557 0.163758,0.64918 0.163758,2.04113 0,1.39194 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60824 -1.666824,0.60824 -1.017641,0 -1.666825,-0.60824 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1505" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -62.373871,191.0954 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1507" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke-width:0.22458273" - d="m -54.261991,191.18897 q -0.959155,0 -2.163948,-0.18715 v -1.42703 q 1.40949,0.21054 2.163948,0.21054 1.041034,0 1.041034,-1.11121 0,-0.30998 -0.152061,-0.58486 -0.146213,-0.28072 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70191 0.649184,-0.69013 1.660976,-0.69013 0.789548,0 2.163948,0.14622 v 1.40949 q -1.421188,-0.15207 -2.163948,-0.15207 -0.222244,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25733 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16376 -0.03509,0.32167 0,0.69012 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25733 0.467881,0.23979 0.772003,0.61995 0.304123,0.38015 0.421093,0.76615 0.11697,0.386 0.11697,0.80125 0,1.13461 -0.701821,1.82473 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - <path - id="rect830" - d="m -34.067609,68.945953 h 61.236978 c 0.871819,0 1.573681,0.701862 1.573681,1.573681 v 57.404236 c 0,0.87182 -0.701862,1.57368 -1.573681,1.57368 h -61.236978 c -0.871818,0 -1.57368,-0.70186 -1.57368,-1.57368 V 70.519634 c 0,-0.871819 0.701862,-1.573681 1.57368,-1.573681 z" - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="rect935" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 423.13086 43.6875 C 418.42358 43.642806 409.51758 43.820312 409.51758 43.820312 C 409.08563 43.820312 408.73828 44.184688 408.73828 44.634766 L 408.73828 50.160156 L 400.62695 50.160156 L 400.62695 44.634766 C 400.62695 44.184643 400.2796 43.820312 399.84766 43.820312 L 390.17578 43.820312 C 389.74383 43.820312 389.39648 44.184688 389.39648 44.634766 L 389.39648 65.115234 C 389.39648 65.565357 389.74355 65.927734 390.17578 65.927734 L 399.84766 65.927734 C 400.2796 65.927734 400.62695 65.56541 400.62695 65.115234 L 400.62695 59.589844 L 408.73828 59.589844 L 408.73828 65.115234 C 408.73828 65.565357 409.08567 65.927734 409.51758 65.927734 L 414.41211 65.927734 L 414.41211 91.988281 C 411.60569 93.102134 409.62891 95.830602 409.62891 99.042969 L 409.62891 138.53711 C 409.62891 142.74482 413.01688 146.13281 417.22461 146.13281 C 421.43234 146.13281 424.81836 142.74482 424.81836 138.53711 L 424.81836 99.042969 C 424.81836 95.830462 422.84177 93.102041 420.03516 91.988281 L 420.03516 65.927734 L 425.33594 65.927734 C 426.18028 65.184366 426.99599 62.20152 431.50586 59.921875 C 443.61656 55.761613 448.87546 66.069322 
448.7832 64.646484 C 448.68456 63.127439 448.01161 53.599455 437.4375 48.011719 C 432.83066 45.940152 427.41416 43.904955 425.95312 43.820312 C 425.80234 43.740857 424.69995 43.702398 423.13086 43.6875 z " - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:1.80452192;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path943" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 433.71875 12.013672 C 429.56388 12.082752 426.27441 13.803819 424.2793 15.287109 C 421.61915 17.264827 420.76596 18.396257 416.74219 18.40625 C 412.71843 18.416455 411.8633 17.284786 409.19336 15.320312 C 406.52342 13.355843 401.55054 10.9882 395.31445 12.675781 C 389.07836 14.363363 385.977 18.918093 384.66211 21.960938 C 383.34722 25.00376 383.175 26.409141 379.69531 28.429688 C 376.21563 30.450238 374.9078 29.905352 371.61328 29.539062 C 368.31881 29.172751 362.82241 29.604069 358.26562 34.183594 C 353.70878 38.763115 353.29899 44.26403 353.68164 47.556641 C 354.06423 50.849271 354.61841 52.152884 352.61523 55.642578 C 350.612 59.132269 349.21205 59.308664 346.17578 60.638672 C 343.13952 61.968699 338.596 65.087643 336.93945 71.332031 C 335.2829 77.576416 337.68253 82.539066 339.66016 85.199219 C 341.63784 87.859375 342.76929 88.712556 342.7793 92.736328 C 342.78912 96.760127 341.65975 97.613269 339.69531 100.2832 C 337.73082 102.95317 335.3632 107.92796 337.05078 114.16406 C 338.73836 120.40011 343.29112 123.50151 346.33398 124.81641 C 349.3768 126.1313 350.78411 126.30354 352.80469 129.7832 C 354.82521 133.2629 354.28039 134.56878 353.91406 137.86328 C 353.54775 141.15774 353.97684 146.6561 358.55664 151.21289 C 363.13617 155.76968 368.63706 156.16783 371.92969 155.78516 C 375.22231 155.40244 376.52593 154.86006 380.01562 156.86328 C 383.50532 158.86651 383.68364 160.26645 385.01367 163.30273 C 386.3437 166.33898 389.46072 170.8825 395.70508 172.53906 C 401.94944 174.19559 409.60613 171.03081 409.57227 169.81641 C 409.53787 168.60201 404.41203 170.69144 399.27148 167.45703 C 394.13094 164.22262 394.49856 162.22587 393.36523 159.63867 C 392.2319 157.05143 390.20458 152.19384 384.54883 148.94727 C 378.89313 145.70065 373.67871 146.40437 370.87305 146.73047 C 368.06738 147.05664 367.87987 147.62527 364.98438 144.74414 C 362.08888 141.86297 362.6664 141.67834 362.97852 138.87109 C 363.2907 136.06381 363.96221 130.85045 360.6875 125.21094 C 357.41279 119.57143 352.54984 117.56376 349.95703 116.44336 C 347.36421 115.32292 346.92054 115.73194 345.85352 111.78906 C 344.7865 107.84611 345.36703 107.96067 347.04102 105.68555 C 348.71495 103.41046 351.91078 99.236116 351.89453 92.714844 C 351.87866 86.193511 348.66179 82.022582 346.97656 79.755859 C 345.29134 77.489118 344.70453 77.614182 345.75195 73.666016 C 346.79932 69.717853 347.25068 70.125498 349.83789 68.992188 C 352.42511 67.858877 357.28462 65.829566 360.53125 60.173828 C 363.77782 54.51809 363.07414 49.305672 362.74805 46.5 C 362.42214 43.694328 361.85322 43.506847 364.73438 40.611328 C 367.61553 37.715809 367.79821 38.281625 370.60547 38.59375 C 373.41268 38.905863 378.62807 39.589161 384.26758 36.314453 C 389.90709 33.039765 391.903 28.176808 393.02344 25.583984 C 394.14388 22.991164 393.74458 22.547482 397.6875 21.480469 C 401.63043 20.413436 401.51591 20.992108 403.79102 22.666016 C 406.06612 24.339942 410.24044 27.537717 416.76172 27.521484 C 423.28303 27.505232 427.4442 24.286786 
429.71094 22.601562 C 431.97767 20.91632 431.85261 20.331501 435.80078 21.378906 C 439.74895 22.426289 439.35301 22.877632 440.48633 25.464844 C 441.61964 28.052059 443.647 32.90964 449.30273 36.15625 C 454.95846 39.40286 460.17286 38.699148 462.97852 38.373047 C 465.78418 38.046949 465.97167 37.480175 468.86719 40.361328 C 471.76271 43.242481 471.18517 43.425163 470.87305 46.232422 C 470.56093 49.039655 469.88936 54.253073 473.16406 59.892578 C 476.43876 65.532083 481.30171 67.528031 483.89453 68.648438 C 486.48736 69.768886 486.93103 69.371552 487.99805 73.314453 C 489.06506 77.257355 488.48446 77.142878 486.81055 79.417969 C 485.13661 81.69304 481.94081 85.867362 481.95703 92.388672 C 481.97328 98.909982 485.18977 103.0692 486.875 105.33594 C 488.56023 107.60267 489.14505 107.47956 488.09766 111.42773 C 487.05027 115.37594 486.60088 114.978 484.01367 116.11133 C 481.42645 117.24462 476.56888 119.27396 473.32227 124.92969 C 470.07566 130.58541 470.77742 135.79782 471.10352 138.60352 C 471.42961 141.40917 471.99834 141.59669 469.11719 144.49219 C 466.23603 147.38772 466.05334 146.81215 463.24609 146.5 C 460.43884 146.18781 455.2235 145.5144 449.58398 148.78906 C 443.94448 152.0638 441.93879 156.9267 440.81836 159.51953 C 439.88706 161.67468 439.97662 162.34736 437.78125 163.125 C 437.46839 162.9514 437.10701 162.78529 436.67578 162.62305 C 433.03935 161.2549 432.56081 163.10499 429.57617 165.32812 C 426.89222 167.32729 424.97118 167.56216 425.08398 170.19922 C 424.83063 170.17382 424.59327 170.16921 424.40234 170.19141 C 423.79298 170.26208 423.55013 170.36296 422.9043 169.99219 C 422.25847 169.62142 422.22661 169.36077 421.98047 168.79883 C 421.73431 168.23696 421.15756 167.3964 420.00195 167.08984 C 418.84628 166.78332 417.42924 167.36894 417.43555 167.59375 C 417.44197 167.81825 418.39047 167.43246 419.3418 168.03125 C 420.29313 168.62985 420.22581 168.99777 420.43555 169.47656 C 420.6452 169.95543 421.01974 170.8562 422.06641 171.45703 C 423.11309 172.05786 424.07842 171.92758 424.59766 171.86719 C 424.99492 171.82092 425.11138 171.75356 425.38281 171.96094 C 426.17977 174.68982 428.1074 174.38918 431.44922 175.64648 C 435.08565 177.01463 436.197 178.66064 439.18164 176.4375 C 441.36896 174.80826 441.14897 173.63201 440.62109 171.71094 C 445.48047 169.67879 448.0303 165.82501 449.18945 163.14258 C 450.50435 160.09976 450.67657 158.69436 454.15625 156.67383 C 457.63594 154.65325 458.9438 155.19814 462.23828 155.56445 C 465.53277 155.93069 471.02914 155.49961 475.58594 150.91992 C 480.14274 146.34042 480.55261 140.83949 480.16992 137.54688 C 479.78721 134.25426 479.23311 132.95065 481.23633 129.46094 C 483.23955 125.97126 484.63953 125.7949 487.67578 124.46484 C 490.71204 123.13483 495.25557 120.01588 496.91211 113.77148 C 498.56865 107.5271 496.16912 102.56444 494.19141 99.904297 C 492.21368 97.244152 491.08227 96.390986 491.07227 92.367188 C 491.06244 88.343434 492.19178 87.490261 494.15625 84.820312 C 496.12072 82.150364 498.48837 77.17553 496.80078 70.939453 C 495.1132 64.703395 490.56041 61.601999 487.51758 60.287109 C 484.47474 58.972219 483.06743 58.801941 481.04688 55.322266 C 479.02633 51.842591 479.57315 50.534716 479.93945 47.240234 C 480.30576 43.945737 479.87445 38.447409 475.29492 33.890625 C 470.7154 29.333819 465.2145 28.925905 461.92188 29.308594 C 458.62926 29.691301 457.32563 30.245405 453.83594 28.242188 C 450.34624 26.23897 450.16986 24.838995 448.83984 21.802734 C 447.50983 18.766477 444.39086 14.221009 438.14648 12.564453 C 436.58539 12.15032 435.10371 11.990645 
433.71875 12.013672 z " - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75552428;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="text955" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1476" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -30.489692,126.81963 q -0.959156,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10528 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25148 -1.099519,-0.65504 -1.099519,-2.07037 0,-1.0118 0.643335,-1.70192 0.649185,-0.69012 1.660977,-0.69012 0.789548,0 2.163947,0.14621 v 1.40949 q -1.421187,-0.15206 -2.163947,-0.15206 -0.222244,0 -0.39185,0.0994 -0.163759,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152061,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76615 0.11697,0.38601 0.11697,0.80125 0,1.13461 -0.701821,1.82473 -0.701821,0.69013 -1.836431,0.69013 z" /> - <path - id="path1478" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -26.635526,118.49135 h 4.491654 v 1.40364 h -2.994436 v 1.9885 h 2.245827 v 1.40364 h -2.245827 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1480" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -15.786544,118.49135 v 1.40364 h -1.871523 v 6.83106 h -1.497218 v -6.83106 h -1.871522 v -1.40364 z" /> - <path - id="path1482" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -15.043784,118.49135 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.7486 0.08773,0.4211 0.374304,0.67843 0.292426,0.25734 0.70767,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67843 0.04094,-0.19884 
0.04094,-0.7486 v -5.24027 h 1.4972182 v 5.24027 q 0,0.82463 -0.093576,1.22233 -0.198849,0.82464 -0.888973,1.34516 -0.684276,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883125,-1.34516 -0.09358,-0.3977 -0.09358,-1.22233 z" /> - <path - id="path1484" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -7.1892386,119.89499 v 2.3862 h 0.3626075 q 0.5029717,0 0.7310635,-0.0468 0.3976985,-0.0819 0.6316388,-0.39185 0.2397888,-0.30997 0.2397888,-0.75446 0,-0.44449 -0.2397888,-0.75446 -0.2339403,-0.30997 -0.6316388,-0.39185 -0.2280918,-0.0468 -0.7310635,-0.0468 z m -1.497218,-1.40364 h 1.8481285 q 0.8655791,0 1.2340351,0.0877 0.8304881,0.19885 1.3510053,0.87143 0.5263657,0.66673 0.5263657,1.63758 0,0.97085 -0.5263657,1.64343 -0.5205172,0.66673 -1.3510053,0.86558 -0.368456,0.0877 -1.2340351,0.0877 h -0.3509105 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text959" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1465" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 2.1625267,118.49135 v 1.40364 H 0.29100422 v 6.83106 H -1.2062138 v -6.83106 h -1.8715224 v -1.40364 z" /> - <path - id="path1467" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 4.3966564,121.09979 q -0.087728,0.47957 -0.087728,1.50891 0,1.02934 0.087728,1.50892 0.1052732,0.5673 0.3860015,0.93576 0.2865769,0.36261 0.7427605,0.36261 0.4561836,0 0.736912,-0.36261 0.2865768,-0.36846 0.39185,-0.93576 0.087728,-0.48543 0.087728,-1.50892 0,-1.02934 -0.087728,-1.50891 -0.1052732,-0.56731 -0.39185,-0.92992 -0.2807284,-0.36845 -0.736912,-0.36845 -0.4561836,0 -0.7427605,0.36845 -0.2807283,0.36261 -0.3860015,0.92992 z m -1.4211874,3.55004 q -0.1637582,-0.64918 -0.1637582,-2.04113 0,-1.39194 0.1637582,-2.04113 0.2339403,-0.94746 0.8831247,-1.5557 0.6491844,-0.61409 1.6668247,-0.61409 1.0176403,0 1.6668247,0.61409 0.6491844,0.60824 0.8831247,1.5557 0.1637582,0.64919 0.1637582,2.04113 0,1.39195 -0.1637582,2.04113 -0.2339403,0.94746 -0.8831247,1.56155 -0.6491844,0.60825 -1.6668247,0.60825 -1.0176403,0 -1.6668247,-0.60825 -0.6491844,-0.61409 -0.8831247,-1.56155 z" /> - <path - id="path1469" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 10.37968,121.09979 q -0.08773,0.47957 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02934 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92992 -0.280729,-0.36845 -0.736912,-0.36845 -0.456184,0 -0.742761,0.36845 -0.280728,0.36261 -0.386001,0.92992 z m -1.4211878,3.55004 Q 8.794734,124.00065 8.794734,122.6087 q 0,-1.39194 0.1637582,-2.04113 0.2339403,-0.94746 0.8831247,-1.5557 0.6491841,-0.61409 1.6668251,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60824 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.6668251,-0.60825 -0.6491844,-0.61409 -0.8831247,-1.56155 z" /> - <path - id="path1471" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 15.245638,126.72605 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1473" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 23.357518,126.81963 q -0.959156,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10528 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25148 -1.099519,-0.65504 -1.099519,-2.07037 0,-1.0118 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222244,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152061,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76615 0.11697,0.38601 0.11697,0.80125 0,1.13461 -0.701821,1.82473 -0.701821,0.69013 -1.836431,0.69013 z" /> - </g> - <path - id="rect1036" - d="m -34.067609,133.31531 h 61.236978 c 0.871819,0 1.573681,0.70186 1.573681,1.57368 v 57.40424 c 0,0.87182 -0.701862,1.57368 -1.573681,1.57368 h -61.236978 c -0.871818,0 -1.57368,-0.70186 -1.57368,-1.57368 v -57.40424 c 0,-0.87182 0.701862,-1.57368 1.57368,-1.57368 z" - style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="rect1038" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 423.13086 286.97266 
C 418.42358 286.92796 409.51758 287.10742 409.51758 287.10742 C 409.08563 287.10742 408.73828 287.46978 408.73828 287.91992 L 408.73828 293.44727 L 400.62695 293.44727 L 400.62695 287.91992 C 400.62695 287.46982 400.2796 287.10742 399.84766 287.10742 L 390.17578 287.10742 C 389.74383 287.10742 389.39648 287.46978 389.39648 287.91992 L 389.39648 308.40234 C 389.39648 308.85249 389.74355 309.21289 390.17578 309.21289 L 399.84766 309.21289 C 400.2796 309.21289 400.62695 308.85237 400.62695 308.40234 L 400.62695 302.875 L 408.73828 302.875 L 408.73828 308.40234 C 408.73828 308.85249 409.08567 309.21289 409.51758 309.21289 L 414.41211 309.21289 L 414.41211 335.27344 C 411.60569 336.3873 409.62891 339.11577 409.62891 342.32812 L 409.62891 381.82422 C 409.62891 386.03193 413.01688 389.41797 417.22461 389.41797 C 421.43234 389.41797 424.81836 386.03193 424.81836 381.82422 L 424.81836 342.32812 C 424.81836 339.11563 422.84177 336.3872 420.03516 335.27344 L 420.03516 309.21289 L 425.33594 309.21289 C 426.18028 308.46953 426.99599 305.48861 431.50586 303.20898 C 443.61656 299.04871 448.87546 309.35448 448.7832 307.93164 C 448.68456 306.41261 448.01161 296.88656 437.4375 291.29883 C 432.83066 289.22727 427.41416 287.19205 425.95312 287.10742 C 425.80234 287.02796 424.69995 286.98756 423.13086 286.97266 z " - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:#ffffff;stroke-width:1.80452192;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path1046" - transform="matrix(0.26458333,0,0,0.26458333,-113.7608,68.445953)" - d="M 433.71875 255.29883 C 429.56388 255.36791 426.27441 257.08898 424.2793 258.57227 C 421.61915 260.54998 420.76596 261.68139 416.74219 261.69141 C 412.71843 261.70274 411.8633 260.57187 409.19336 258.60742 C 406.52342 256.64294 401.55054 254.27529 395.31445 255.96289 C 389.07836 257.65049 385.977 262.20327 384.66211 265.24609 C 383.34722 268.28892 383.175 269.69626 379.69531 271.7168 C 376.21563 273.73737 374.9078 273.19049 371.61328 272.82422 C 368.31881 272.45798 362.82241 272.88906 358.26562 277.46875 C 353.70878 282.04825 353.29899 287.54919 353.68164 290.8418 C 354.06423 294.13441 354.61841 295.43802 352.61523 298.92773 C 350.612 302.41741 349.21205 302.59381 346.17578 303.92383 C 343.13952 305.25384 338.596 308.3728 336.93945 314.61719 C 335.2829 320.86157 337.68253 325.82423 339.66016 328.48438 C 341.63784 331.14452 342.76929 331.99769 342.7793 336.02148 C 342.78912 340.04528 341.65975 340.90038 339.69531 343.57031 C 337.73082 346.24028 335.3632 351.21311 337.05078 357.44922 C 338.73836 363.68525 343.29112 366.78666 346.33398 368.10156 C 349.3768 369.41646 350.78411 369.5887 352.80469 373.06836 C 354.82521 376.54806 354.28039 377.85589 353.91406 381.15039 C 353.54775 384.44485 353.97684 389.94126 358.55664 394.49805 C 363.13617 399.05483 368.63706 399.45299 371.92969 399.07031 C 375.22231 398.6876 376.52593 398.14521 380.01562 400.14844 C 383.50532 402.15166 383.68364 403.55161 385.01367 406.58789 C 386.3437 409.62414 389.46072 414.16765 395.70508 415.82422 C 401.94944 417.48075 409.60613 414.31792 409.57227 413.10352 C 409.53787 411.88912 404.41203 413.97659 399.27148 410.74219 C 394.13094 407.50778 394.49856 405.51103 393.36523 402.92383 C 392.2319 400.33659 390.20458 395.479 384.54883 392.23242 C 378.89313 388.98581 373.67871 389.68953 370.87305 390.01562 C 368.06738 390.3418 367.87987 390.91043 364.98438 388.0293 C 362.08888 385.14813 362.6664 
384.96545 362.97852 382.1582 C 363.2907 379.35092 363.96221 374.1356 360.6875 368.49609 C 357.41279 362.85659 352.54984 360.85087 349.95703 359.73047 C 347.36421 358.61003 346.92054 359.01905 345.85352 355.07617 C 344.7865 351.13322 345.36703 351.24583 347.04102 348.9707 C 348.71495 346.69562 351.91078 342.52127 351.89453 336 C 351.87866 329.47865 348.66179 325.3097 346.97656 323.04297 C 345.29134 320.77623 344.70453 320.89934 345.75195 316.95117 C 346.79932 313.003 347.25068 313.41064 349.83789 312.27734 C 352.42511 311.14401 357.28462 309.11666 360.53125 303.46094 C 363.77782 297.80518 363.07414 292.59081 362.74805 289.78516 C 362.42214 286.97946 361.85322 286.79202 364.73438 283.89648 C 367.61553 281.00099 367.79821 281.56676 370.60547 281.87891 C 373.41268 282.1911 378.62807 282.87431 384.26758 279.59961 C 389.90709 276.32491 391.903 271.46193 393.02344 268.86914 C 394.14388 266.27631 393.74458 265.83262 397.6875 264.76562 C 401.63043 263.69859 401.51591 264.27921 403.79102 265.95312 C 406.06612 267.62704 410.24044 270.82289 416.76172 270.80664 C 423.28303 270.79152 427.4442 267.57393 429.71094 265.88867 C 431.97767 264.20346 431.85261 263.6186 435.80078 264.66602 C 439.74895 265.7134 439.35301 266.16276 440.48633 268.75 C 441.61964 271.3372 443.647 276.19479 449.30273 279.44141 C 454.95846 282.68802 460.17286 281.98625 462.97852 281.66016 C 465.78418 281.33398 465.97167 280.76531 468.86719 283.64648 C 471.76271 286.52762 471.18517 286.71033 470.87305 289.51758 C 470.56093 292.32482 469.88936 297.53826 473.16406 303.17773 C 476.43876 308.81724 481.30171 310.81315 483.89453 311.93359 C 486.48736 313.05403 486.93103 312.65669 487.99805 316.59961 C 489.06506 320.54253 488.48446 320.42804 486.81055 322.70312 C 485.13661 324.97821 481.94081 329.15252 481.95703 335.67383 C 481.97328 342.19514 485.18977 346.35631 486.875 348.62305 C 488.56023 350.88978 489.14505 350.76472 488.09766 354.71289 C 487.05027 358.6611 486.60088 358.26511 484.01367 359.39844 C 481.42645 360.53173 476.56888 362.55912 473.32227 368.21484 C 470.07566 373.87057 470.77742 379.08298 471.10352 381.88867 C 471.42961 384.69433 471.99834 384.88185 469.11719 387.77734 C 466.23603 390.67288 466.05334 390.09731 463.24609 389.78516 C 460.43884 389.47297 455.2235 388.80151 449.58398 392.07617 C 443.94448 395.35091 441.93879 400.21381 440.81836 402.80664 C 439.8871 404.96171 439.97637 405.63255 437.78125 406.41016 C 437.46854 406.23668 437.1067 406.07033 436.67578 405.9082 C 433.03935 404.54006 432.56081 406.39014 429.57617 408.61328 C 426.89222 410.61245 424.97118 410.84732 425.08398 413.48438 C 424.83072 413.459 424.59322 413.45438 424.40234 413.47656 C 423.79298 413.54724 423.55013 413.64812 422.9043 413.27734 C 422.25847 412.90657 422.22661 412.64788 421.98047 412.08594 C 421.73431 411.52407 421.15756 410.68156 420.00195 410.375 C 418.84628 410.06848 417.42924 410.6541 417.43555 410.87891 C 417.44197 411.10341 418.39047 410.71762 419.3418 411.31641 C 420.29313 411.91501 420.22581 412.28488 420.43555 412.76367 C 420.6452 413.24254 421.01974 414.14136 422.06641 414.74219 C 423.11309 415.34302 424.07842 415.21274 424.59766 415.15234 C 424.99492 415.10608 425.11138 415.03872 425.38281 415.24609 C 426.17977 417.97498 428.1074 417.67434 431.44922 418.93164 C 435.08565 420.29979 436.197 421.94579 439.18164 419.72266 C 441.36886 418.09349 441.14891 416.91896 440.62109 414.99805 C 445.48047 412.9659 448.0303 409.11211 449.18945 406.42969 C 450.50435 403.38687 450.67657 401.97952 454.15625 399.95898 C 457.63594 397.93841 458.9438 
398.4833 462.23828 398.84961 C 465.53277 399.21585 471.02914 398.78672 475.58594 394.20703 C 480.14274 389.62753 480.55261 384.1266 480.16992 380.83398 C 479.78721 377.54137 479.23311 376.23776 481.23633 372.74805 C 483.23955 369.25837 484.63953 369.08005 487.67578 367.75 C 490.71204 366.41998 495.25557 363.30298 496.91211 357.05859 C 498.56865 350.81421 496.16912 345.85155 494.19141 343.19141 C 492.21368 340.53126 491.08227 339.6781 491.07227 335.6543 C 491.06244 331.63054 492.19178 330.77544 494.15625 328.10547 C 496.12072 325.43553 498.48837 320.46072 496.80078 314.22461 C 495.1132 307.98858 490.56041 304.88908 487.51758 303.57422 C 484.47474 302.25932 483.06743 302.08708 481.04688 298.60742 C 479.02633 295.12772 479.57315 293.81985 479.93945 290.52539 C 480.30576 287.23089 479.87445 281.73452 475.29492 277.17773 C 470.7154 272.62091 465.2145 272.21107 461.92188 272.59375 C 458.62926 272.97646 457.32563 273.53057 453.83594 271.52734 C 450.34624 269.52412 450.16986 268.12414 448.83984 265.08789 C 447.50983 262.05164 444.39086 257.50813 438.14648 255.85156 C 436.58539 255.43743 435.10371 255.2758 433.71875 255.29883 z " - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75552428;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="text1058" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1450" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -30.489692,191.18897 q -0.959156,0 -2.163948,-0.18715 v -1.42703 q 1.40949,0.21054 2.163948,0.21054 1.041034,0 1.041034,-1.11121 0,-0.30998 -0.152061,-0.58486 -0.146213,-0.28072 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643335,-1.70191 0.649185,-0.69013 1.660977,-0.69013 0.789548,0 2.163947,0.14622 v 1.40949 q -1.421187,-0.15207 -2.163947,-0.15207 -0.222244,0 -0.39185,0.0994 -0.163759,0.0994 -0.251486,0.25733 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16376 -0.03509,0.32167 0,0.69012 0.46788,0.90067 0.152061,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25733 0.467881,0.23979 0.772003,0.61995 0.304123,0.38015 0.421093,0.76615 0.11697,0.386 0.11697,0.80125 0,1.13461 -0.701821,1.82473 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1452" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -26.635526,182.8607 h 4.491654 v 1.40364 h -2.994436 v 1.98849 h 2.245827 v 1.40365 h -2.245827 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1454" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -15.786544,182.8607 v 1.40364 h -1.871523 v 6.83106 h -1.497218 v -6.83106 h -1.871522 v -1.40364 z" /> - <path - id="path1456" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -15.043784,182.8607 h 1.497218 v 5.24026 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374304,0.67843 0.292426,0.25733 0.70767,0.25733 0.415244,0 0.701821,-0.25733 0.292425,-0.25734 0.380153,-0.67843 0.04094,-0.19885 0.04094,-0.74861 v -5.24026 h 1.4972182 v 5.24026 q 0,0.82464 -0.093576,1.22234 -0.198849,0.82464 -0.888973,1.34516 -0.684276,0.52051 -1.637582,0.52051 -0.953307,0 -1.643431,-0.52051 -0.684275,-0.52052 -0.883125,-1.34516 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1458" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m -7.1892386,184.26434 v 2.38619 h 0.3626075 q 0.5029717,0 0.7310635,-0.0468 0.3976985,-0.0819 0.6316388,-0.39185 0.2397888,-0.30997 0.2397888,-0.75445 0,-0.44449 -0.2397888,-0.75446 -0.2339403,-0.30997 -0.6316388,-0.39185 -0.2280918,-0.0468 -0.7310635,-0.0468 z m -1.497218,-1.40364 h 1.8481285 q 0.8655791,0 1.2340351,0.0877 0.8304881,0.19885 1.3510053,0.87142 0.5263657,0.66673 0.5263657,1.63759 0,0.97085 -0.5263657,1.64343 -0.5205172,0.66673 -1.3510053,0.86558 -0.368456,0.0877 -1.2340351,0.0877 h -0.3509105 v 3.04123 h -1.497218 z" /> - </g> - <g - id="text1062" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1439" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 2.1625267,182.8607 v 1.40364 H 0.29100422 v 6.83106 H -1.2062138 v -6.83106 h -1.8715224 v -1.40364 z" /> - <path - id="path1441" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - 
d="m 4.3966564,185.46913 q -0.087728,0.47958 -0.087728,1.50892 0,1.02934 0.087728,1.50891 0.1052732,0.56731 0.3860015,0.93576 0.2865769,0.36261 0.7427605,0.36261 0.4561836,0 0.736912,-0.36261 0.2865768,-0.36845 0.39185,-0.93576 0.087728,-0.48542 0.087728,-1.50891 0,-1.02934 -0.087728,-1.50892 -0.1052732,-0.5673 -0.39185,-0.92991 -0.2807284,-0.36846 -0.736912,-0.36846 -0.4561836,0 -0.7427605,0.36846 -0.2807283,0.36261 -0.3860015,0.92991 z m -1.4211874,3.55005 q -0.1637582,-0.64919 -0.1637582,-2.04113 0,-1.39195 0.1637582,-2.04113 0.2339403,-0.94746 0.8831247,-1.5557 0.6491844,-0.6141 1.6668247,-0.6141 1.0176403,0 1.6668247,0.6141 0.6491844,0.60824 0.8831247,1.5557 0.1637582,0.64918 0.1637582,2.04113 0,1.39194 -0.1637582,2.04113 -0.2339403,0.94746 -0.8831247,1.56155 -0.6491844,0.60824 -1.6668247,0.60824 -1.0176403,0 -1.6668247,-0.60824 -0.6491844,-0.61409 -0.8831247,-1.56155 z" /> - <path - id="path1443" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 10.37968,185.46913 q -0.08773,0.47958 -0.08773,1.50892 0,1.02934 0.08773,1.50891 0.105273,0.56731 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36845 0.39185,-0.93576 0.08773,-0.48542 0.08773,-1.50891 0,-1.02934 -0.08773,-1.50892 -0.105274,-0.5673 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.36261 -0.386001,0.92991 z m -1.4211878,3.55005 q -0.1637582,-0.64919 -0.1637582,-2.04113 0,-1.39195 0.1637582,-2.04113 0.2339403,-0.94746 0.8831247,-1.5557 0.6491841,-0.6141 1.6668251,-0.6141 1.01764,0 1.666824,0.6141 0.649185,0.60824 0.883125,1.5557 0.163758,0.64918 0.163758,2.04113 0,1.39194 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60824 -1.666824,0.60824 -1.017641,0 -1.6668251,-0.60824 -0.6491844,-0.61409 -0.8831247,-1.56155 z" /> - <path - id="path1445" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 15.245638,191.0954 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1447" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 23.357518,191.18897 q -0.959156,0 -2.163948,-0.18715 v -1.42703 q 1.40949,0.21054 2.163948,0.21054 1.041034,0 1.041034,-1.11121 0,-0.30998 -0.152061,-0.58486 -0.146213,-0.28072 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70191 0.649184,-0.69013 1.660976,-0.69013 0.789548,0 2.163948,0.14622 v 1.40949 q -1.421188,-0.15207 
-  [SVG path data omitted — the removed markup here is vector artwork for the setuptools logo: a gear-and-hammer mark with the "SETUP" / "TOOLS" wordmark (Monoid Bold), repeated in four banner color variants (blue #336790 / gold #e5b62f on white, white / gold on blue, black on white, and white on black).]
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 102.95838,174.76349 v 1.40364 h -1.87152 v 6.83105 h -1.497216 v -6.83105 h -1.871522 v -1.40364 z" /> - <path - id="path1346" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 105.19251,177.37192 q -0.0877,0.47958 -0.0877,1.50891 0,1.02934 0.0877,1.50892 0.10528,0.56731 0.38601,0.93576 0.28657,0.36261 0.74276,0.36261 0.45618,0 0.73691,-0.36261 0.28658,-0.36845 0.39185,-0.93576 0.0877,-0.48543 0.0877,-1.50892 0,-1.02933 -0.0877,-1.50891 -0.10527,-0.56731 -0.39185,-0.92991 -0.28073,-0.36846 -0.73691,-0.36846 -0.45619,0 -0.74276,0.36846 -0.28073,0.3626 -0.38601,0.92991 z m -1.42118,3.55004 q -0.16376,-0.64918 -0.16376,-2.04113 0,-1.39194 0.16376,-2.04112 0.23394,-0.94746 0.88312,-1.55571 0.64919,-0.61409 1.66683,-0.61409 1.01764,0 1.66682,0.61409 0.64919,0.60825 0.88313,1.55571 0.16375,0.64918 0.16375,2.04112 0,1.39195 -0.16375,2.04113 -0.23394,0.94746 -0.88313,1.56156 -0.64918,0.60824 -1.66682,0.60824 -1.01764,0 -1.66683,-0.60824 -0.64918,-0.6141 -0.88312,-1.56156 z" /> - <path - id="path1348" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 111.17554,177.37192 q -0.0877,0.47958 -0.0877,1.50891 0,1.02934 0.0877,1.50892 0.10527,0.56731 0.386,0.93576 0.28658,0.36261 0.74276,0.36261 0.45618,0 0.73691,-0.36261 0.28658,-0.36845 0.39185,-0.93576 0.0877,-0.48543 0.0877,-1.50892 0,-1.02933 -0.0877,-1.50891 -0.10527,-0.56731 -0.39185,-0.92991 -0.28073,-0.36846 -0.73691,-0.36846 -0.45618,0 -0.74276,0.36846 -0.28073,0.3626 -0.386,0.92991 z m -1.42119,3.55004 q -0.16376,-0.64918 -0.16376,-2.04113 0,-1.39194 0.16376,-2.04112 0.23394,-0.94746 0.88313,-1.55571 0.64918,-0.61409 1.66682,-0.61409 1.01764,0 1.66682,0.61409 0.64919,0.60825 0.88313,1.55571 0.16376,0.64918 0.16376,2.04112 0,1.39195 -0.16376,2.04113 -0.23394,0.94746 -0.88313,1.56156 -0.64918,0.60824 -1.66682,0.60824 -1.01764,0 -1.66682,-0.60824 -0.64919,-0.6141 -0.88313,-1.56156 z" /> - <path - id="path1350" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 116.0415,182.99818 v -8.23469 h 1.49721 v 6.83105 h 3.36874 v 1.40364 z" /> - <path - id="path1352" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 124.15338,183.09176 q -0.95916,0 -2.16395,-0.18715 v -1.42704 q 1.40949,0.21055 2.16395,0.21055 1.04103,0 1.04103,-1.11122 0,-0.30997 -0.15206,-0.58485 -0.14621,-0.28073 -0.41524,-0.386 -0.16961,-0.0643 -0.46789,-0.10527 -0.29827,-0.0409 -0.61409,-0.11697 -0.30997,-0.0819 -0.59655,-0.25149 -1.09952,-0.65503 -1.09952,-2.07037 0,-1.01179 0.64334,-1.70192 0.64918,-0.69012 1.66098,-0.69012 0.78954,0 2.16394,0.14621 v 1.40949 q -1.42118,-0.15206 -2.16394,-0.15206 -0.22225,0 -0.39185,0.0994 -0.16376,0.0994 -0.25149,0.25733 -0.0877,0.15791 -0.12867,0.32167 -0.0351,0.16376 -0.0351,0.32167 0,0.69012 0.46788,0.90067 0.15206,0.0702 0.69013,0.15791 0.54391,0.0819 0.87727,0.25733 0.46788,0.23979 0.77201,0.61994 0.30412,0.38015 0.42109,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.70182,1.82474 -0.70182,0.69012 -1.83643,0.69012 z" /> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="242.7mm" height="125.92mm" version="1.1" viewBox="0 0 242.7 125.92" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="layer1" transform="translate(113.76 -68.446)"> + <path id="rect1633" d="m-111.69 133.32h61.237c0.87182 0 1.5737 0.70186 1.5737 1.5737v57.404c0 0.87182-0.70186 1.5737-1.5737 1.5737h-61.237c-0.87182 0-1.5737-0.70186-1.5737-1.5737v-57.404c0-0.87182 0.70186-1.5737 1.5737-1.5737z" fill="#fff" style="paint-order:markers fill stroke"/> + <path id="rect1616" d="m-111.69 68.946h61.237c0.87182 0 1.5737 0.70186 1.5737 1.5737v57.404c0 0.87182-0.70186 1.5737-1.5737 1.5737h-61.237c-0.87182 0-1.5737-0.70186-1.5737-1.5737v-57.404c0-0.87182 0.70186-1.5737 1.5737-1.5737z" fill="#fff" style="paint-order:markers fill stroke"/> + <path id="rect934" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m129.77 43.688c-4.7073-0.044694-13.613 0.13281-13.613 0.13281-0.43194 0-0.77929 0.36438-0.77929 0.81445v5.5254h-8.1113v-5.5254c0-0.45012-0.34735-0.81445-0.7793-0.81445h-9.6719c-0.43195 0-0.7793 0.36438-0.7793 0.81445v20.48c0 0.45012 0.34707 0.8125 0.7793 0.8125h9.6719c0.43195 0 0.7793-0.36232 0.7793-0.8125v-5.5254h8.1113v5.5254c0 0.45012 0.34739 0.8125 0.77929 0.8125h4.8945v26.061c-2.8064 1.1139-4.7832 3.8423-4.7832 7.0547v39.494c0 4.2077 3.388 7.5957 7.5957 7.5957 4.2077 0 7.5957-3.388 7.5957-7.5957v-39.494c0-3.2119-1.9775-5.9405-4.7832-7.0547v-26.061h5.3008c0.84434-0.74337 1.6581-3.7262 6.168-6.0059 12.111-4.1603 17.37 6.1474 17.277 4.7246-0.09865-1.519-0.7716-11.047-11.346-16.635-4.6068-2.0716-10.023-4.1068-11.484-4.1914-0.15078-0.079455-1.2532-0.11791-2.8223-0.13281z" fill="#e5b62f" stroke="#e5b62f" stroke-width="1.8045" style="paint-order:markers fill stroke"/> + <path id="path944" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m140.35 12.014c-4.1549 0.06908-7.4443 1.7901-9.4395 3.2734-2.6602 1.9777-3.5133 3.1091-7.5371 3.1191-4.0238 
0.010205-4.8769-1.1215-7.5469-3.0859-2.6699-1.9645-7.6448-4.3321-13.881-2.6445-6.2361 1.6876-9.3375 6.2423-10.652 9.2852-1.3149 3.0428-1.4871 4.4482-4.9668 6.4688-3.4797 2.0206-4.7856 1.4757-8.0801 1.1094-3.2945-0.36631-8.7928 0.065007-13.35 4.6445-4.5568 4.5795-4.9647 10.08-4.582 13.373 0.38259 3.2926 0.93482 4.5962-1.0684 8.0859-2.0032 3.4897-3.4032 3.6661-6.4395 4.9961-3.0363 1.33-7.5798 4.449-9.2363 10.693-1.6566 6.2444 0.74506 11.207 2.7227 13.867 1.9777 2.6602 3.1072 3.5133 3.1172 7.5371 0.011339 4.0238-1.1195 4.8769-3.084 7.5469-1.9645 2.67-4.3321 7.6448-2.6445 13.881 1.6876 6.236 6.2423 9.3374 9.2852 10.652 3.0428 1.3149 4.4482 1.4871 6.4688 4.9668 2.0205 3.4797 1.4757 4.7856 1.1094 8.0801-0.36631 3.2945 0.062774 8.7928 4.6426 13.35 4.5795 4.5568 10.08 4.9549 13.373 4.5723 3.2926-0.38272 4.5962-0.9251 8.0859 1.0781 3.4897 2.0032 3.668 3.4032 4.998 6.4394 1.33 3.0362 4.447 7.5798 10.691 9.2363 6.2444 1.6565 13.901-1.5082 13.867-2.7226-0.03439-1.2144-5.1602 0.87503-10.301-2.3594-5.1406-3.2344-4.7729-5.2312-5.9062-7.8184-1.1333-2.5872-3.1606-7.4448-8.8164-10.691-5.6557-3.2466-10.868-2.5429-13.674-2.2168-2.8057 0.32617-2.9932 0.89285-5.8887-1.9883-2.8955-2.8812-2.3199-3.0638-2.0078-5.8711 0.31219-2.8073 0.98369-8.0206-2.291-13.66-3.2747-5.6395-8.1377-7.6472-10.73-8.7676-2.5928-1.1204-3.0365-0.71142-4.1035-4.6543-1.067-3.943-0.4865-3.8284 1.1875-6.1035 1.6739-2.2751 4.8698-6.4494 4.8535-12.971-0.015874-6.5213-3.2327-10.692-4.918-12.959-1.6852-2.2667-2.2701-2.1417-1.2227-6.0898 1.0474-3.9482 1.4987-3.5405 4.0859-4.6738 2.5872-1.1333 7.4448-3.1626 10.691-8.8184 3.2466-5.6557 2.5429-10.868 2.2168-13.674-0.32591-2.8057-0.89483-2.9932 1.9863-5.8887 2.8812-2.8955 3.0638-2.3297 5.8711-2.0176 2.8072 0.31211 8.0226 0.99541 13.662-2.2793 5.6395-3.2747 7.6354-8.1376 8.7559-10.73 1.1204-2.5928 0.72115-3.0365 4.6641-4.1035 3.9429-1.067 3.8284-0.48836 6.1035 1.1855 2.2751 1.6739 6.4514 4.8717 12.973 4.8555 6.5213-0.016252 10.681-3.2347 12.947-4.9199 2.2667-1.6852 2.1417-2.2701 6.0898-1.2227 3.9482 1.0474 3.5522 1.4987 4.6855 4.0859 1.1333 2.5872 3.1607 7.4448 8.8164 10.691s10.87 2.5429 13.676 2.2168c2.8057-0.3261 2.9932-0.89287 5.8887 1.9883s2.318 3.0638 2.0059 5.8711c-0.31211 2.8072-0.98369 8.0207 2.291 13.66 3.2747 5.6395 8.1376 7.6355 10.73 8.7559 2.5928 1.1204 3.0365 0.72311 4.1035 4.666 1.067 3.9429 0.48642 3.8284-1.1875 6.1035-1.6739 2.2751-4.8697 6.4494-4.8535 12.971 0.01625 6.5213 3.2327 10.681 4.918 12.947 1.6852 2.2667 2.27 2.1436 1.2226 6.0918-1.0474 3.9482-1.4968 3.5503-4.084 4.6836-2.5872 1.1333-7.4448 3.1626-10.691 8.8184-3.2466 5.6557-2.5448 10.868-2.2188 13.674 0.3261 2.8056 0.89483 2.9932-1.9863 5.8887-2.8812 2.8955-3.0638 2.32-5.8711 2.0078-2.8072-0.31219-8.0206-0.9856-13.66 2.2891-5.6395 3.2747-7.6472 8.1376-8.7676 10.73-0.93129 2.1552-0.84174 2.8278-3.0371 3.6055-0.31286-0.1736-0.67425-0.33971-1.1055-0.50195-3.6364-1.3682-4.115 0.48194-7.0996 2.7051-2.684 1.9992-4.6031 2.234-4.4902 4.8711-0.25407-0.02563-0.4922-0.03006-0.68359-0.00781-0.60936 0.07067-0.85026 0.17155-1.4961-0.19922s-0.67964-0.63142-0.92579-1.1934c-0.24616-0.56187-0.82291-1.4024-1.9785-1.709-1.1557-0.30652-2.5727 0.2791-2.5664 0.50391 0.00643 0.2245 0.95492-0.16129 1.9062 0.4375 0.95134 0.5986 0.88402 0.96652 1.0938 1.4453 0.20965 0.47887 0.5842 1.3796 1.6309 1.9805 1.0467 0.60083 2.012 0.47055 2.5312 0.41016 0.39726-0.04627 0.51372-0.11363 0.78516 0.09375 0.79695 2.7289 2.7246 2.4282 6.0664 3.6855 3.6364 1.3682 4.7478 3.0142 7.7324 0.79102 2.1873-1.6292 1.9673-2.8055 1.4394-4.7266 4.8594-2.0322 7.4092-5.8859 
8.5684-8.5684 1.3149-3.0428 1.4871-4.4482 4.9668-6.4688 3.4797-2.0206 4.7875-1.4757 8.082-1.1094 3.2945 0.36624 8.7908-0.06484 13.348-4.6445 4.5568-4.5795 4.9667-10.08 4.584-13.373-0.38271-3.2926-0.93681-4.5962 1.0664-8.0859 2.0032-3.4897 3.4032-3.666 6.4395-4.9961 3.0362-1.33 7.5817-4.449 9.2383-10.693 1.6565-6.2444-0.74494-11.207-2.7227-13.867-1.9777-2.6601-3.1091-3.5133-3.1191-7.5371-0.00983-4.0238 1.1195-4.8769 3.084-7.5469 1.9645-2.6699 4.3341-7.6448 2.6465-13.881-1.6876-6.2361-6.2423-9.3375-9.2852-10.652s-4.4502-1.4852-6.4707-4.9648-1.4737-4.7876-1.1074-8.082c0.36631-3.2945-0.065-8.7928-4.6445-13.35-4.5795-4.5568-10.08-4.9647-13.373-4.582-3.2926 0.38271-4.5962 0.93681-8.0859-1.0664-3.4897-2.0032-3.6661-3.4032-4.9961-6.4395-1.33-3.0363-4.449-7.5817-10.693-9.2383-1.5611-0.41413-3.0428-0.57381-4.4277-0.55078z" fill="#336790" style="paint-order:markers fill stroke"/> + <g id="text1023" fill="#336790" stroke-width=".22458" aria-label="SETUP"> + <path id="path1513" d="m-108.11 126.82q-0.95915 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14622-0.28073-0.41525-0.386-0.1696-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.164 0.14621v1.4095q-1.4212-0.15206-2.164-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25148 0.25733-0.0877 0.15791-0.12867 0.32167-0.0351 0.16376-0.0351 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61994 0.30413 0.38015 0.4211 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70183 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1515" d="m-104.26 118.49h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1517" d="m-93.406 118.49v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1519" d="m-92.663 118.49h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29243-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52051-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1521" d="m-84.809 119.89v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446 0-0.44448-0.23979-0.75446-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52637 0.66673 0.52637 1.6376t-0.52637 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1524" d="m-75.457 
118.49v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1526" d="m-73.223 121.1q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824-1.0176 0-1.6668-0.60824-0.64918-0.6141-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1528" d="m-67.24 121.1q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824-1.0176 0-1.6668-0.60824-0.64918-0.6141-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1530" d="m-62.374 126.73v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1532" d="m-54.262 126.82q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect905" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m129.77 286.97c-4.7073-0.0447-13.613 0.13476-13.613 0.13476-0.43194 0-0.77929 0.36236-0.77929 0.8125v5.5274h-8.1113v-5.5274c0-0.4501-0.34735-0.8125-0.7793-0.8125h-9.6719c-0.43195 0-0.7793 0.36236-0.7793 0.8125v20.482c0 0.45011 0.34707 0.81055 0.7793 0.81055h9.6719c0.43195 0 0.7793-0.36052 0.7793-0.81055v-5.5273h8.1113v5.5273c0 0.45011 0.34739 0.81055 0.77929 0.81055h4.8945v26.061c-2.8064 1.1138-4.7832 3.8423-4.7832 7.0547v39.496c0 4.2078 3.388 
7.5938 7.5957 7.5938 4.2077 0 7.5957-3.386 7.5957-7.5938v-39.496c0-3.2119-1.9775-5.9405-4.7832-7.0547v-26.061h5.3008c0.84434-0.74336 1.6581-3.7242 6.168-6.0039 12.111-4.1602 17.37 6.1455 17.277 4.7227-0.09865-1.519-0.7716-11.045-11.346-16.633-4.6068-2.0716-10.023-4.1068-11.484-4.1914-0.15078-0.07946-1.2532-0.11986-2.8223-0.13476z" stroke="#000" stroke-width="1.8045" style="paint-order:markers fill stroke"/> + <path id="path913" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m140.35 255.3c-4.1549 0.06908-7.4443 1.7902-9.4395 3.2734-2.6602 1.9777-3.5133 3.1091-7.5371 3.1191-4.0238 0.01133-4.8769-1.1195-7.5469-3.084-2.6699-1.9645-7.6448-4.3321-13.881-2.6445-6.2361 1.6876-9.3375 6.2404-10.652 9.2832-1.3149 3.0428-1.4871 4.4502-4.9668 6.4707-3.4797 2.0206-4.7856 1.4737-8.0801 1.1074-3.2945-0.36624-8.7928 0.06484-13.35 4.6445-4.5568 4.5795-4.9647 10.08-4.582 13.373 0.38259 3.2926 0.93482 4.5962-1.0684 8.0859-2.0032 3.4897-3.4032 3.6661-6.4395 4.9961-3.0363 1.33-7.5798 4.4489-9.2363 10.693-1.6566 6.2444 0.74506 11.207 2.7227 13.867 1.9777 2.6601 3.1072 3.5133 3.1172 7.5371 0.011339 4.0238-1.1195 4.8789-3.084 7.5488-1.9645 2.67-4.3321 7.6428-2.6445 13.879 1.6876 6.2361 6.2423 9.3374 9.2852 10.652 3.0428 1.3149 4.4482 1.4871 6.4688 4.9668 2.0205 3.4797 1.4757 4.7875 1.1094 8.082-0.36631 3.2945 0.062774 8.7908 4.6426 13.348 4.5795 4.5568 10.08 4.9549 13.373 4.5723 3.2926-0.38271 4.5962-0.92514 8.0859 1.0781 3.4897 2.0032 3.668 3.4032 4.998 6.4394 1.33 3.0363 4.447 7.5797 10.691 9.2363 6.2444 1.6565 13.901-1.5063 13.867-2.7207-0.03439-1.2144-5.1602 0.87304-10.301-2.3613-5.1406-3.2344-4.7729-5.2312-5.9062-7.8184-1.1333-2.5872-3.1606-7.4448-8.8164-10.691-5.6557-3.2466-10.868-2.5429-13.674-2.2168-2.8057 0.32618-2.9932 0.89481-5.8887-1.9863-2.8955-2.8812-2.3199-3.0638-2.0078-5.8711 0.31219-2.8073 0.98369-8.0226-2.291-13.662-3.2747-5.6395-8.1377-7.6452-10.73-8.7656-2.5928-1.1204-3.0365-0.71142-4.1035-4.6543-1.067-3.943-0.4865-3.8303 1.1875-6.1055 1.6739-2.2751 4.8698-6.4494 4.8535-12.971-0.015874-6.5214-3.2327-10.69-4.918-12.957-1.6852-2.2667-2.2701-2.1436-1.2227-6.0918 1.0474-3.9482 1.4987-3.5405 4.0859-4.6738 2.5872-1.1333 7.4448-3.1606 10.691-8.8164 3.2466-5.6557 2.5429-10.87 2.2168-13.676-0.32591-2.8057-0.89483-2.9931 1.9863-5.8887 2.8812-2.8955 3.0638-2.3297 5.8711-2.0176 2.8072 0.31219 8.0226 0.9954 13.662-2.2793 5.6395-3.2747 7.6354-8.1377 8.7559-10.73 1.1204-2.5928 0.72115-3.0365 4.6641-4.1035 3.9429-1.067 3.8284-0.48641 6.1035 1.1875 2.2751 1.6739 6.4514 4.8697 12.973 4.8535 6.5213-0.01512 10.681-3.2328 12.947-4.918 2.2667-1.6852 2.1417-2.2701 6.0898-1.2226 3.9482 1.0474 3.5522 1.4967 4.6855 4.084 1.1333 2.5872 3.1607 7.4448 8.8164 10.691 5.6557 3.2466 10.87 2.5448 13.676 2.2188 2.8057-0.32618 2.9932-0.89485 5.8887 1.9863 2.8955 2.8811 2.318 3.0638 2.0059 5.8711-0.31211 2.8072-0.98369 8.0206 2.291 13.66 3.2747 5.6395 8.1376 7.6355 10.73 8.7559 2.5928 1.1205 3.0365 0.7231 4.1035 4.666 1.067 3.9429 0.48642 3.8284-1.1875 6.1035-1.6739 2.2751-4.8697 6.4494-4.8535 12.971 0.01625 6.5213 3.2327 10.682 4.918 12.949 1.6852 2.2667 2.27 2.1417 1.2226 6.0898-1.0474 3.9482-1.4968 3.5522-4.084 4.6856-2.5872 1.1333-7.4448 3.1607-10.691 8.8164-3.2466 5.6557-2.5448 10.868-2.2188 13.674 0.3261 2.8057 0.89483 2.9932-1.9863 5.8887-2.8812 2.8955-3.0638 2.32-5.8711 2.0078-2.8072-0.31219-8.0206-0.98365-13.66 2.291-5.6395 3.2747-7.6472 8.1376-8.7676 10.73-0.93126 2.1551-0.84198 2.8259-3.0371 3.6035-0.31272-0.17348-0.67455-0.33983-1.1055-0.50196-3.6364-1.3681-4.115 0.48194-7.0996 2.7051-2.6846 1.9997-4.6041 
2.2342-4.4902 4.873-0.2541-0.02564-0.49218-0.03202-0.68359-0.00977-0.60936 0.07068-0.85026 0.17156-1.4961-0.19922-0.64583-0.37077-0.67964-0.62946-0.92579-1.1914-0.24616-0.56187-0.82291-1.4044-1.9785-1.7109-1.1557-0.30652-2.5727 0.2791-2.5664 0.50391 0.00643 0.2245 0.95492-0.16129 1.9062 0.4375 0.95134 0.5986 0.88402 0.96847 1.0938 1.4473 0.20965 0.47887 0.5842 1.3777 1.6309 1.9785 1.0467 0.60083 2.012 0.47055 2.5312 0.41015 0.39726-0.04626 0.51372-0.11362 0.78516 0.09375 0.79695 2.7289 2.7246 2.4282 6.0664 3.6856 3.6364 1.3682 4.7478 3.0142 7.7324 0.79102 2.1872-1.6292 1.9673-2.8037 1.4394-4.7246 4.8594-2.0321 7.4092-5.8859 8.5684-8.5684 1.3149-3.0428 1.4871-4.4502 4.9668-6.4707 3.4797-2.0206 4.7875-1.4757 8.082-1.1094 3.2945 0.36624 8.7908-0.06293 13.348-4.6426 4.5568-4.5795 4.9667-10.082 4.584-13.375-0.38271-3.2926-0.93681-4.5942 1.0664-8.084 2.0032-3.4896 3.4032-3.668 6.4395-4.998 3.0362-1.33 7.5817-4.447 9.2383-10.691 1.6565-6.2443-0.74494-11.207-2.7227-13.867-1.9777-2.6602-3.1091-3.5133-3.1191-7.5371-0.00983-4.0237 1.1195-4.8789 3.084-7.5488 1.9645-2.6699 4.3341-7.6448 2.6465-13.881-1.6876-6.236-6.2423-9.3355-9.2852-10.65-3.0428-1.3149-4.4502-1.4871-6.4707-4.9668-2.0206-3.4797-1.4737-4.7876-1.1074-8.082 0.36631-3.2945-0.065-8.7909-4.6445-13.348-4.5795-4.5568-10.08-4.9667-13.373-4.584-3.2926 0.38271-4.5962 0.93678-8.0859-1.0664-3.4897-2.0032-3.6661-3.4032-4.9961-6.4394s-4.449-7.5798-10.693-9.2363c-1.5611-0.41413-3.0428-0.57576-4.4277-0.55273z" style="paint-order:markers fill stroke"/> + <g id="text925" stroke-width=".22458" aria-label="SETUP"> + <path id="path1488" d="m-108.11 191.19q-0.95915 0-2.1639-0.18715v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 0-0.30998-0.15206-0.58486-0.14622-0.28072-0.41525-0.386-0.1696-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.164 0.14622v1.4095q-1.4212-0.15207-2.164-0.15207-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25148 0.25733-0.0877 0.15791-0.12867 0.32167-0.0351 0.16376-0.0351 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61995 0.30413 0.38015 0.4211 0.76615t0.11697 0.80125q0 1.1346-0.70183 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1490" d="m-104.26 182.86h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1492" d="m-93.406 182.86v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1494" d="m-92.663 182.86h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67843 0.29242 0.25733 0.70767 0.25733t0.70182-0.25733q0.29243-0.25734 0.38015-0.67843 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52051-1.6376 0.52051t-1.6434-0.52051q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1496" d="m-84.809 184.26v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 
0.63164-0.39185 0.23979-0.30997 0.23979-0.75445 0-0.44449-0.23979-0.75446-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87142 0.52637 0.66673 0.52637 1.6376 0 0.97085-0.52637 1.6434-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text929" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1499" d="m-75.457 182.86v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1501" d="m-73.223 185.47q-0.08773 0.47958-0.08773 1.5089t0.08773 1.5089q0.10527 0.56731 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36845 0.39185-0.93576 0.08773-0.48542 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.6141 1.6668-0.6141 1.0176 0 1.6668 0.6141 0.64918 0.60824 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.3919-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824-1.0176 0-1.6668-0.60824-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1503" d="m-67.24 185.47q-0.08773 0.47958-0.08773 1.5089t0.08773 1.5089q0.10527 0.56731 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36845 0.39185-0.93576 0.08773-0.48542 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.6141 1.6668-0.6141 1.0176 0 1.6668 0.6141 0.64918 0.60824 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.3919-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824-1.0176 0-1.6668-0.60824-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1505" d="m-62.374 191.1v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1507" d="m-54.262 191.19q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 0-0.30998-0.15206-0.58486-0.14621-0.28072-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.1639 0.14622v1.4095q-1.4212-0.15207-2.1639-0.15207-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61995 0.30412 0.38015 0.42109 0.76615t0.11697 0.80125q0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + 
</g> + <path id="rect830" d="m-34.068 68.946h61.237c0.87182 0 1.5737 0.70186 1.5737 1.5737v57.404c0 0.87182-0.70186 1.5737-1.5737 1.5737h-61.237c-0.87182 0-1.5737-0.70186-1.5737-1.5737v-57.404c0-0.87182 0.70186-1.5737 1.5737-1.5737z" fill="#336790" style="paint-order:markers fill stroke"/> + <path id="rect935" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m423.13 43.688c-4.7073-0.044694-13.613 0.13281-13.613 0.13281-0.43195 0-0.7793 0.36438-0.7793 0.81445v5.5254h-8.1113v-5.5254c0-0.45012-0.34735-0.81445-0.77929-0.81445h-9.6719c-0.43195 0-0.7793 0.36438-0.7793 0.81445v20.48c0 0.45012 0.34707 0.8125 0.7793 0.8125h9.6719c0.43194 0 0.77929-0.36232 0.77929-0.8125v-5.5254h8.1113v5.5254c0 0.45012 0.34739 0.8125 0.7793 0.8125h4.8945v26.061c-2.8064 1.1139-4.7832 3.8423-4.7832 7.0547v39.494c0 4.2077 3.388 7.5957 7.5957 7.5957s7.5938-3.388 7.5938-7.5957v-39.494c0-3.2125-1.9766-5.9409-4.7832-7.0547v-26.061h5.3008c0.84434-0.74337 1.66-3.7262 6.1699-6.0059 12.111-4.1603 17.37 6.1474 17.277 4.7246-0.09864-1.519-0.77159-11.047-11.346-16.635-4.6068-2.0716-10.023-4.1068-11.484-4.1914-0.15078-0.079455-1.2532-0.11791-2.8223-0.13281z" fill="#e5b62f" stroke="#e5b62f" stroke-width="1.8045" style="paint-order:markers fill stroke"/> + <path id="path943" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m433.72 12.014c-4.1549 0.06908-7.4443 1.7901-9.4394 3.2734-2.6602 1.9777-3.5133 3.1091-7.5371 3.1191-4.0238 0.010205-4.8789-1.1215-7.5488-3.0859-2.6699-1.9645-7.6428-4.3321-13.879-2.6445-6.2361 1.6876-9.3374 6.2423-10.652 9.2852-1.3149 3.0428-1.4871 4.4482-4.9668 6.4688-3.4797 2.0206-4.7875 1.4757-8.082 1.1094-3.2945-0.36631-8.7909 0.065007-13.348 4.6445-4.5568 4.5795-4.9666 10.08-4.584 13.373 0.38259 3.2926 0.93677 4.5962-1.0664 8.0859-2.0032 3.4897-3.4032 3.6661-6.4394 4.9961-3.0363 1.33-7.5798 4.449-9.2363 10.693-1.6566 6.2444 0.74308 11.207 2.7207 13.867 1.9777 2.6602 3.1091 3.5133 3.1191 7.5371 0.00982 4.0238-1.1196 4.8769-3.084 7.5469-1.9645 2.67-4.3321 7.6448-2.6445 13.881 1.6876 6.236 6.2403 9.3374 9.2832 10.652 3.0428 1.3149 4.4501 1.4871 6.4707 4.9668 2.0205 3.4797 1.4757 4.7856 1.1094 8.0801-0.36631 3.2945 0.06278 8.7928 4.6426 13.35 4.5795 4.5568 10.08 4.9549 13.373 4.5723 3.2926-0.38272 4.5962-0.9251 8.0859 1.0781 3.4897 2.0032 3.668 3.4032 4.998 6.4394 1.33 3.0362 4.447 7.5798 10.691 9.2363 6.2444 1.6565 13.901-1.5082 13.867-2.7226-0.0344-1.2144-5.1602 0.87503-10.301-2.3594-5.1405-3.2344-4.7729-5.2312-5.9062-7.8184-1.1333-2.5872-3.1606-7.4448-8.8164-10.691-5.6557-3.2466-10.87-2.5429-13.676-2.2168-2.8057 0.32617-2.9932 0.8948-5.8887-1.9863-2.8955-2.8812-2.318-3.0658-2.0059-5.873 0.31218-2.8073 0.98369-8.0206-2.291-13.66s-8.1377-7.6472-10.73-8.7676c-2.5928-1.1204-3.0365-0.71142-4.1035-4.6543-1.067-3.943-0.48649-3.8284 1.1875-6.1035 1.6739-2.2751 4.8698-6.4494 4.8535-12.971-0.01587-6.5213-3.2327-10.692-4.918-12.959-1.6852-2.2667-2.272-2.1417-1.2246-6.0898 1.0474-3.9482 1.4987-3.5405 4.0859-4.6738 2.5872-1.1333 7.4467-3.1626 10.693-8.8184 3.2466-5.6557 2.5429-10.868 2.2168-13.674-0.32591-2.8057-0.89483-2.9932 1.9863-5.8887 2.8812-2.8955 3.0638-2.3297 5.8711-2.0176 2.8072 0.31211 8.0226 0.99541 13.662-2.2793 5.6395-3.2747 7.6354-8.1376 8.7559-10.73 1.1204-2.5928 0.72114-3.0365 4.6641-4.1035 3.9429-1.067 3.8284-0.48836 6.1035 1.1855 2.2751 1.6739 6.4494 4.8717 12.971 4.8555 6.5213-0.016252 10.682-3.2347 12.949-4.9199 2.2667-1.6852 2.1417-2.2701 6.0898-1.2227 3.9482 1.0474 3.5522 1.4987 4.6856 4.0859 1.1333 2.5872 3.1607 7.4448 8.8164 10.691s10.87 2.5429 13.676 2.2168c2.8057-0.3261 
2.9932-0.89287 5.8887 1.9883s2.318 3.0638 2.0059 5.8711c-0.31212 2.8072-0.98369 8.0207 2.291 13.66s8.1376 7.6355 10.73 8.7559c2.5928 1.1204 3.0365 0.72311 4.1035 4.666 1.067 3.9429 0.48641 3.8284-1.1875 6.1035-1.6739 2.2751-4.8697 6.4494-4.8535 12.971 0.01625 6.5213 3.2327 10.681 4.918 12.947 1.6852 2.2667 2.27 2.1436 1.2227 6.0918-1.0474 3.9482-1.4968 3.5503-4.084 4.6836-2.5872 1.1333-7.4448 3.1626-10.691 8.8184-3.2466 5.6557-2.5448 10.868-2.2188 13.674 0.32609 2.8056 0.89482 2.9932-1.9863 5.8887-2.8812 2.8955-3.0638 2.32-5.8711 2.0078-2.8072-0.31219-8.0226-0.9856-13.662 2.2891-5.6395 3.2747-7.6452 8.1376-8.7656 10.73-0.9313 2.1552-0.84174 2.8278-3.0371 3.6055-0.31286-0.1736-0.67424-0.33971-1.1055-0.50195-3.6364-1.3682-4.115 0.48194-7.0996 2.7051-2.684 1.9992-4.605 2.234-4.4922 4.8711-0.25335-0.0254-0.49071-0.03001-0.68164-0.00781-0.60936 0.07067-0.85221 0.17155-1.498-0.19922s-0.67769-0.63142-0.92383-1.1934c-0.24616-0.56187-0.82291-1.4024-1.9785-1.709-1.1557-0.30652-2.5727 0.2791-2.5664 0.50391 0.00642 0.2245 0.95492-0.16129 1.9062 0.4375 0.95133 0.5986 0.88401 0.96652 1.0938 1.4453 0.20965 0.47887 0.58419 1.3796 1.6309 1.9805 1.0467 0.60083 2.012 0.47055 2.5312 0.41016 0.39726-0.04627 0.51372-0.11363 0.78515 0.09375 0.79696 2.7289 2.7246 2.4282 6.0664 3.6855 3.6364 1.3682 4.7478 3.0142 7.7324 0.79102 2.1873-1.6292 1.9673-2.8055 1.4394-4.7266 4.8594-2.0322 7.4092-5.8859 8.5684-8.5684 1.3149-3.0428 1.4871-4.4482 4.9668-6.4688 3.4797-2.0206 4.7876-1.4757 8.082-1.1094 3.2945 0.36624 8.7909-0.06484 13.348-4.6445 4.5568-4.5795 4.9667-10.08 4.584-13.373-0.38271-3.2926-0.93681-4.5962 1.0664-8.0859 2.0032-3.4897 3.4032-3.666 6.4394-4.9961 3.0363-1.33 7.5798-4.449 9.2363-10.693 1.6565-6.2444-0.74299-11.207-2.7207-13.867-1.9777-2.6601-3.1091-3.5133-3.1191-7.5371-0.00983-4.0238 1.1195-4.8769 3.084-7.5469 1.9645-2.6699 4.3321-7.6448 2.6445-13.881-1.6876-6.2361-6.2404-9.3375-9.2832-10.652-3.0428-1.3149-4.4502-1.4852-6.4707-4.9648s-1.4737-4.7876-1.1074-8.082c0.36631-3.2945-0.065-8.7928-4.6445-13.35-4.5795-4.5568-10.08-4.9647-13.373-4.582-3.2926 0.38271-4.5962 0.93681-8.0859-1.0664-3.4897-2.0032-3.6661-3.4032-4.9961-6.4395-1.33-3.0363-4.449-7.5817-10.693-9.2383-1.5611-0.41413-3.0428-0.57381-4.4277-0.55078z" fill="#fff" style="paint-order:markers fill stroke"/> + <g id="text955" fill="#fff" stroke-width=".22458" aria-label="SETUP"> + <path id="path1476" d="m-30.49 126.82q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10528-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25148-1.0995-0.65504-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76615 0.11697 0.38601 0.11697 0.80125 0 1.1346-0.70182 1.8247-0.70182 0.69013-1.8364 0.69013z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1478" d="m-26.636 118.49h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1480" d="m-15.787 
118.49v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1482" d="m-15.044 118.49h1.4972v5.2403q0 0.54976 0.04094 0.7486 0.08773 0.4211 0.3743 0.67843 0.29243 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67843 0.04094-0.19884 0.04094-0.7486v-5.2403h1.4972v5.2403q0 0.82463-0.093576 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052-0.95331 0-1.6434-0.52052-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1484" d="m-7.1892 119.89v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52637 0.66673 0.52637 1.6376t-0.52637 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text959" fill="#e5b62f" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1465" d="m2.1625 118.49v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1467" d="m4.3967 121.1q-0.087728 0.47957-0.087728 1.5089t0.087728 1.5089q0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261t0.73691-0.36261q0.28658-0.36846 0.39185-0.93576 0.087728-0.48543 0.087728-1.5089 0-1.0293-0.087728-1.5089-0.10527-0.56731-0.39185-0.92992-0.28073-0.36845-0.73691-0.36845t-0.74276 0.36845q-0.28073 0.36261-0.386 0.92992zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409t1.6668 0.61409q0.64918 0.60824 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825t-1.6668-0.60825q-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1469" d="m10.38 121.1q-0.08773 0.47957-0.08773 1.5089t0.08773 1.5089q0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92992-0.28073-0.36845-0.73691-0.36845-0.45618 0-0.74276 0.36845-0.28073 0.36261-0.386 0.92992zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60824 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1471" d="m15.246 126.73v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1473" d="m23.358 126.82q-0.95916 0-2.1639-0.18715v-1.427q1.4095 
0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10528-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25148-1.0995-0.65504-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76615 0.11697 0.38601 0.11697 0.80125 0 1.1346-0.70182 1.8247-0.70182 0.69013-1.8364 0.69013z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect1036" d="m-34.068 133.32h61.237c0.87182 0 1.5737 0.70186 1.5737 1.5737v57.404c0 0.87182-0.70186 1.5737-1.5737 1.5737h-61.237c-0.87182 0-1.5737-0.70186-1.5737-1.5737v-57.404c0-0.87182 0.70186-1.5737 1.5737-1.5737z" style="paint-order:markers fill stroke"/> + <path id="rect1038" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m423.13 286.97c-4.7073-0.0447-13.613 0.13476-13.613 0.13476-0.43195 0-0.7793 0.36236-0.7793 0.8125v5.5274h-8.1113v-5.5274c0-0.4501-0.34735-0.8125-0.77929-0.8125h-9.6719c-0.43195 0-0.7793 0.36236-0.7793 0.8125v20.482c0 0.45015 0.34707 0.81055 0.7793 0.81055h9.6719c0.43194 0 0.77929-0.36052 0.77929-0.81055v-5.5273h8.1113v5.5273c0 0.45015 0.34739 0.81055 0.7793 0.81055h4.8945v26.061c-2.8064 1.1139-4.7832 3.8423-4.7832 7.0547v39.496c0 4.2077 3.388 7.5938 7.5957 7.5938s7.5938-3.386 7.5938-7.5938v-39.496c0-3.2125-1.9766-5.9409-4.7832-7.0547v-26.061h5.3008c0.84434-0.74336 1.66-3.7243 6.1699-6.0039 12.111-4.1603 17.37 6.1455 17.277 4.7227-0.09864-1.519-0.77159-11.045-11.346-16.633-4.6068-2.0716-10.023-4.1068-11.484-4.1914-0.15078-0.07946-1.2532-0.11986-2.8223-0.13476z" fill="#fff" stroke="#fff" stroke-width="1.8045" style="paint-order:markers fill stroke"/> + <path id="path1046" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m433.72 255.3c-4.1549 0.06908-7.4443 1.7902-9.4394 3.2734-2.6602 1.9777-3.5133 3.1091-7.5371 3.1191-4.0238 0.01133-4.8789-1.1195-7.5488-3.084-2.6699-1.9645-7.6428-4.3321-13.879-2.6445s-9.3374 6.2404-10.652 9.2832c-1.3149 3.0428-1.4871 4.4502-4.9668 6.4707-3.4797 2.0206-4.7875 1.4737-8.082 1.1074-3.2945-0.36624-8.7909 0.06484-13.348 4.6445-4.5568 4.5795-4.9666 10.08-4.584 13.373 0.38259 3.2926 0.93677 4.5962-1.0664 8.0859-2.0032 3.4897-3.4032 3.6661-6.4394 4.9961-3.0363 1.33-7.5798 4.449-9.2363 10.693-1.6566 6.2444 0.74308 11.207 2.7207 13.867 1.9777 2.6601 3.1091 3.5133 3.1191 7.5371 0.00982 4.0238-1.1196 4.8789-3.084 7.5488-1.9645 2.67-4.3321 7.6428-2.6445 13.879 1.6876 6.236 6.2403 9.3374 9.2832 10.652 3.0428 1.3149 4.4501 1.4871 6.4707 4.9668 2.0205 3.4797 1.4757 4.7875 1.1094 8.082-0.36631 3.2945 0.06278 8.7909 4.6426 13.348 4.5795 4.5568 10.08 4.9549 13.373 4.5723 3.2926-0.38271 4.5962-0.9251 8.0859 1.0781 3.4897 2.0032 3.668 3.4032 4.998 6.4394 1.33 3.0362 4.447 7.5798 10.691 9.2363 6.2444 1.6565 13.901-1.5063 13.867-2.7207-0.0344-1.2144-5.1602 0.87307-10.301-2.3613-5.1405-3.2344-4.7729-5.2312-5.9062-7.8184-1.1333-2.5872-3.1606-7.4448-8.8164-10.691-5.6557-3.2466-10.87-2.5429-13.676-2.2168-2.8057 0.32618-2.9932 0.89481-5.8887-1.9863-2.8955-2.8812-2.318-3.0638-2.0059-5.8711 0.31218-2.8073 
0.98369-8.0226-2.291-13.662-3.2747-5.6395-8.1377-7.6452-10.73-8.7656-2.5928-1.1204-3.0365-0.71142-4.1035-4.6543-1.067-3.943-0.48649-3.8303 1.1875-6.1055 1.6739-2.2751 4.8698-6.4494 4.8535-12.971-0.01587-6.5214-3.2327-10.69-4.918-12.957-1.6852-2.2667-2.272-2.1436-1.2246-6.0918 1.0474-3.9482 1.4987-3.5405 4.0859-4.6738 2.5872-1.1333 7.4467-3.1607 10.693-8.8164 3.2466-5.6558 2.5429-10.87 2.2168-13.676-0.32591-2.8057-0.89483-2.9931 1.9863-5.8887 2.8812-2.8955 3.0638-2.3297 5.8711-2.0176 2.8072 0.31219 8.0226 0.9954 13.662-2.2793s7.6354-8.1377 8.7559-10.73c1.1204-2.5928 0.72114-3.0365 4.6641-4.1035 3.9429-1.067 3.8284-0.48641 6.1035 1.1875 2.2751 1.6739 6.4494 4.8698 12.971 4.8535 6.5213-0.01512 10.682-3.2327 12.949-4.918 2.2667-1.6852 2.1417-2.2701 6.0898-1.2226 3.9482 1.0474 3.5522 1.4967 4.6856 4.084 1.1333 2.5872 3.1607 7.4448 8.8164 10.691 5.6557 3.2466 10.87 2.5448 13.676 2.2188 2.8057-0.32618 2.9932-0.89485 5.8887 1.9863 2.8955 2.8811 2.318 3.0638 2.0059 5.8711-0.31212 2.8072-0.98369 8.0207 2.291 13.66 3.2747 5.6395 8.1376 7.6354 10.73 8.7559 2.5928 1.1204 3.0365 0.7231 4.1035 4.666 1.067 3.9429 0.48641 3.8284-1.1875 6.1035-1.6739 2.2751-4.8697 6.4494-4.8535 12.971 0.01625 6.5213 3.2327 10.682 4.918 12.949 1.6852 2.2667 2.27 2.1417 1.2227 6.0898-1.0474 3.9482-1.4968 3.5522-4.084 4.6856-2.5872 1.1333-7.4448 3.1607-10.691 8.8164-3.2466 5.6557-2.5448 10.868-2.2188 13.674 0.32609 2.8057 0.89482 2.9932-1.9863 5.8887-2.8812 2.8955-3.0638 2.32-5.8711 2.0078-2.8072-0.31219-8.0226-0.98365-13.662 2.291-5.6395 3.2747-7.6452 8.1376-8.7656 10.73-0.93126 2.1551-0.84199 2.8259-3.0371 3.6035-0.31271-0.17348-0.67455-0.33983-1.1055-0.50196-3.6364-1.3681-4.115 0.48194-7.0996 2.7051-2.684 1.9992-4.605 2.234-4.4922 4.8711-0.25326-0.02538-0.49076-0.03-0.68164-0.00782-0.60936 0.07068-0.85221 0.17156-1.498-0.19922-0.64583-0.37077-0.67769-0.62946-0.92383-1.1914-0.24616-0.56187-0.82291-1.4044-1.9785-1.7109-1.1557-0.30652-2.5727 0.2791-2.5664 0.50391 0.00642 0.2245 0.95492-0.16129 1.9062 0.4375 0.95133 0.5986 0.88401 0.96847 1.0938 1.4473 0.20965 0.47887 0.58419 1.3777 1.6309 1.9785 1.0467 0.60083 2.012 0.47055 2.5312 0.41015 0.39726-0.04626 0.51372-0.11362 0.78515 0.09375 0.79696 2.7289 2.7246 2.4282 6.0664 3.6856 3.6364 1.3682 4.7478 3.0142 7.7324 0.79102 2.1872-1.6292 1.9673-2.8037 1.4394-4.7246 4.8594-2.0322 7.4092-5.8859 8.5684-8.5684 1.3149-3.0428 1.4871-4.4502 4.9668-6.4707 3.4797-2.0206 4.7876-1.4757 8.082-1.1094 3.2945 0.36624 8.7909-0.06289 13.348-4.6426 4.5568-4.5795 4.9667-10.08 4.584-13.373-0.38271-3.2926-0.93681-4.5962 1.0664-8.0859 2.0032-3.4897 3.4032-3.668 6.4394-4.998 3.0363-1.33 7.5798-4.447 9.2363-10.691 1.6565-6.2444-0.74299-11.207-2.7207-13.867-1.9777-2.6602-3.1091-3.5133-3.1191-7.5371-0.00983-4.0238 1.1195-4.8789 3.084-7.5488 1.9645-2.6699 4.3321-7.6448 2.6445-13.881-1.6876-6.236-6.2404-9.3355-9.2832-10.65-3.0428-1.3149-4.4502-1.4871-6.4707-4.9668-2.0206-3.4797-1.4737-4.7876-1.1074-8.082 0.36631-3.2945-0.065-8.7909-4.6445-13.348-4.5795-4.5568-10.08-4.9667-13.373-4.584-3.2926 0.38271-4.5962 0.93682-8.0859-1.0664-3.4897-2.0032-3.6661-3.4032-4.9961-6.4394-1.33-3.0362-4.449-7.5798-10.693-9.2363-1.5611-0.41413-3.0428-0.57576-4.4277-0.55273z" fill="#fff" style="paint-order:markers fill stroke"/> + <g id="text1058" fill="#fff" stroke-width=".22458" aria-label="SETUP"> + <path id="path1450" d="m-30.49 191.19q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 
0-0.30998-0.15206-0.58486-0.14621-0.28072-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.1639 0.14622v1.4095q-1.4212-0.15207-2.1639-0.15207-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61995 0.30412 0.38015 0.42109 0.76615t0.11697 0.80125q0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1452" d="m-26.636 182.86h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1454" d="m-15.787 182.86v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1456" d="m-15.044 182.86h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67843 0.29243 0.25733 0.70767 0.25733t0.70182-0.25733q0.29242-0.25734 0.38015-0.67843 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.093576 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52051-1.6376 0.52051-0.95331 0-1.6434-0.52051-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1458" d="m-7.1892 184.26v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75445 0-0.44449-0.23979-0.75446-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87142 0.52637 0.66673 0.52637 1.6376 0 0.97085-0.52637 1.6434-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1062" fill="#fff" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1439" d="m2.1625 182.86v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1441" d="m4.3967 185.47q-0.087728 0.47958-0.087728 1.5089t0.087728 1.5089q0.10527 0.56731 0.386 0.93576 0.28658 0.36261 0.74276 0.36261t0.73691-0.36261q0.28658-0.36845 0.39185-0.93576 0.087728-0.48542 0.087728-1.5089 0-1.0293-0.087728-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846t-0.74276 0.36846q-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.6141 1.6668-0.6141t1.6668 0.6141q0.64918 0.60824 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.3919-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1443" d="m10.38 185.47q-0.08773 0.47958-0.08773 1.5089t0.08773 1.5089q0.10527 0.56731 
0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36845 0.39185-0.93576 0.08773-0.48542 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64918-0.6141 1.6668-0.6141 1.0176 0 1.6668 0.6141 0.64918 0.60824 0.88312 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.3919-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60824-1.6668 0.60824-1.0176 0-1.6668-0.60824-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1445" d="m15.246 191.1v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1447" d="m23.358 191.19q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 0-0.30998-0.15206-0.58486-0.14621-0.28072-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.1639 0.14622v1.4095q-1.4212-0.15207-2.1639-0.15207-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61995 0.30412 0.38015 0.42109 0.76615t0.11697 0.80125q0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect830-6-9" d="m45.345 70.722h82.571c0.28796 0 0.51978 0.23182 0.51978 0.51978v22.171c0 0.28796-0.23182 0.51978-0.51978 0.51978h-82.571c-0.28796 0-0.51978-0.23182-0.51978-0.51978v-22.171c0-0.28796 0.23182-0.51978 0.51978-0.51978z" fill="#fff" style="paint-order:markers fill stroke"/> + <path id="rect1070" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m645.59 29.135c-2.1498-0.02041-6.2168 0.060546-6.2168 0.060546-0.19725 0-0.35547 0.16556-0.35547 0.37109v2.5234h-3.7051v-2.5234c0-0.20557-0.15821-0.37109-0.35547-0.37109h-4.416c-0.19725 0-0.35742 0.16556-0.35742 0.37109v9.3535c0 0.20557 0.16001 0.37109 0.35742 0.37109h4.416c0.19726 0 0.35547-0.16549 0.35547-0.37109v-2.5234h3.7051v2.5234c0 0.20557 0.15825 0.37109 0.35547 0.37109h2.2363v11.906c-1.2817 0.50868-2.1856 1.7497-2.1856 3.2168v18.037c0 1.9217 1.5471 3.4688 3.4688 3.4688s3.4688-1.5471 3.4688-3.4688v-18.037c0-1.4664-0.90281-2.7077-2.1836-3.2168v-11.906h2.4199c0.38561-0.33948 0.75677-1.7011 2.8164-2.7422 5.5309-1.9 7.9328 2.808 7.8906 2.1582-0.04505-0.69374-0.35053-5.0458-5.1797-7.5977-2.1039-0.94608-4.5788-1.8754-5.2461-1.9141-0.06886-0.036283-0.57246-0.053743-1.2891-0.060546z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".82412" style="paint-order:markers fill stroke"/> + <path id="path1078" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m650.43 14.668c-1.8975 0.031546-3.3994 0.81867-4.3105 1.4961-1.2149 0.90321-1.6057 1.4193-3.4434 1.4238-1.8376 0.004535-2.2279-0.51103-3.4473-1.4082-1.2194-0.89717-3.4899-1.9797-6.3379-1.209-2.848 0.77071-4.2647 2.8506-4.8652 4.2402-0.60051 1.3896-0.67842 2.0323-2.2676 2.9551-1.5892 0.92278-2.1868 0.67314-3.6914 0.50586-1.5046-0.16728-4.0146 0.029711-6.0957 2.1211-2.0811 
2.0914-2.2685 4.6037-2.0938 6.1074 0.17473 1.5037 0.42851 2.0996-0.48633 3.6934-0.91487 1.5937-1.5548 1.6738-2.9414 2.2812-1.3867 0.60742-3.4622 2.033-4.2188 4.8848-0.75654 2.8518 0.34098 5.1172 1.2441 6.332 0.90319 1.2149 1.4192 1.6057 1.4238 3.4434 0.00454 1.8377-0.51104 2.2279-1.4082 3.4473-0.89718 1.2194-1.9797 3.4899-1.209 6.3379 0.77071 2.848 2.8506 4.2647 4.2402 4.8652 1.3896 0.60051 2.0323 0.67843 2.9551 2.2676 0.92276 1.5892 0.67316 2.1868 0.50586 3.6914-0.16728 1.5046 0.0297 4.0146 2.1211 6.0957 2.0915 2.0811 4.6037 2.2627 6.1074 2.0879s2.0996-0.42268 3.6934 0.49219c1.5937 0.91486 1.6738 1.5547 2.2812 2.9414 0.60742 1.3866 2.031 3.4622 4.8828 4.2188 2.8518 0.75653 6.3494-0.68953 6.334-1.2441-0.01587-0.55461-2.3574 0.40097-4.7051-1.0762-2.3477-1.4771-2.1797-2.3907-2.6973-3.5723-0.51759-1.1816-1.4424-3.3982-4.0254-4.8809-2.5829-1.4827-4.9648-1.1626-6.2461-1.0137-1.2813 0.14895-1.3671 0.4076-2.6894-0.9082-1.3224-1.3158-1.0586-1.3996-0.91602-2.6816 0.14257-1.2821 0.44867-3.6627-1.0469-6.2383-1.4956-2.5755-3.7163-3.4922-4.9004-4.0039-1.1841-0.5117-1.3857-0.3243-1.873-2.125-0.4873-1.8007-0.22347-1.7481 0.54102-2.7871 0.76447-1.039 2.2242-2.9456 2.2168-5.9238-0.00718-2.9783-1.4764-4.8828-2.2461-5.918-0.76963-1.0352-1.037-0.98009-0.55859-2.7832 0.47833-1.8031 0.68366-1.6152 1.8652-2.1328 1.1816-0.51758 3.4001-1.4444 4.8828-4.0273 1.4827-2.583 1.1626-4.9648 1.0137-6.2461-0.14883-1.2813-0.4076-1.3671 0.90821-2.6895 1.3158-1.3224 1.3976-1.0625 2.6797-0.91992 1.282 0.14253 3.6647 0.45453 6.2402-1.041 2.5755-1.4955 3.4864-3.7163 3.998-4.9004 0.5117-1.1841 0.33013-1.3877 2.1309-1.875 1.8007-0.48731 1.7481-0.22346 2.7871 0.54102 1.039 0.76448 2.9456 2.2262 5.9238 2.2188 2.9783-0.007559 4.8789-1.4784 5.9141-2.248 1.0352-0.76964 0.97813-1.0369 2.7812-0.55859 1.8031 0.47834 1.6211 0.68562 2.1387 1.8672s1.4444 3.4001 4.0273 4.8828c2.583 1.4827 4.9648 1.1607 6.2461 1.0117 1.2813-0.14891 1.3651-0.40761 2.6875 0.9082 1.3224 1.3158 1.0605 1.3996 0.91796 2.6816-0.14252 1.282-0.45062 3.6627 1.0449 6.2383 1.4955 2.5755 3.7162 3.4864 4.9004 3.998 1.1841 0.5117 1.3877 0.33017 1.875 2.1309 0.4873 1.8007 0.22346 1.75-0.54102 2.7891-0.76448 1.039-2.2262 2.9436-2.2188 5.9219 0.00756 2.9783 1.4765 4.8789 2.2461 5.9141 0.76965 1.0352 1.0369 0.97814 0.5586 2.7812-0.47834 1.8031-0.68366 1.623-1.8652 2.1406-1.1816 0.51757-3.4001 1.4424-4.8828 4.0254-1.4827 2.5829-1.1606 4.9647-1.0117 6.2461 0.14892 1.2813 0.40761 1.3671-0.9082 2.6895-1.3158 1.3224-1.3996 1.0586-2.6816 0.91602-1.2821-0.14256-3.6627-0.44865-6.2383 1.0469-2.5756 1.4956-3.4922 3.7163-4.0039 4.9004-0.42128 0.9749-0.43101 1.2878-1.4043 1.6387-0.13992-0.076625-0.29747-0.15087-0.48828-0.22266-1.6607-0.62483-1.8791 0.22103-3.2422 1.2363-1.2238 0.91157-2.0954 1.0256-2.0469 2.2246-0.11719-0.012065-0.22826-0.014151-0.3164-0.003906-0.27829 0.032126-0.38865 0.077526-0.6836-0.091797s-0.30944-0.28632-0.42187-0.54297c-0.11241-0.25659-0.3746-0.64124-0.90235-0.78125-0.52779-0.13999-1.1767 0.12778-1.1738 0.23047 0.00294 0.10254 0.43663-0.07423 0.8711 0.19922 0.43447 0.27337 0.40421 0.4415 0.5 0.66016 0.09573 0.21868 0.26613 0.6299 0.74414 0.9043 0.47801 0.27439 0.91911 0.21509 1.1562 0.1875 0.17999-0.020943 0.25507-0.013849 0.37695 0.078125 0.37226 1.1966 1.2449 1.0814 2.752 1.6484 1.6608 0.62483 2.1701 1.3747 3.5332 0.35938 0.9978-0.74322 0.89282-1.2826 0.65234-2.1582 2.2207-0.92784 3.3884-2.6867 3.918-3.9121 0.60051-1.3896 0.67842-2.0323 2.2676-2.9551 1.5892-0.92279 2.1868-0.67315 3.6914-0.50586 1.5046 0.16724 4.0146-0.029704 6.0957-2.1211 2.0811-2.0914 2.2685-4.6037 
2.0938-6.1074-0.17477-1.5037-0.42853-2.0996 0.48633-3.6934 0.91486-1.5937 1.5548-1.6738 2.9414-2.2812 1.3866-0.60742 3.4622-2.031 4.2188-4.8828 0.75654-2.8518-0.34091-5.1191-1.2441-6.334-0.90322-1.2149-1.4193-1.6038-1.4238-3.4414-0.00453-1.8376 0.51104-2.2279 1.4082-3.4473 0.89716-1.2194 1.9797-3.4919 1.209-6.3398-0.77071-2.848-2.8506-4.2647-4.2402-4.8652-1.3896-0.60051-2.0323-0.67842-2.9551-2.2676-0.92278-1.5892-0.67315-2.1868-0.50586-3.6914 0.16728-1.5046-0.02971-4.0146-2.1211-6.0957-2.0914-2.0811-4.6037-2.2685-6.1074-2.0938-1.5037 0.17477-2.0996 0.42853-3.6934-0.48633-1.5937-0.91486-1.6738-1.5548-2.2812-2.9414-0.60741-1.3866-2.033-3.4622-4.8848-4.2188-0.71295-0.18913-1.389-0.26247-2.0215-0.25195z" fill="#336790" style="paint-order:markers fill stroke"/> + <g id="text1090" fill="#336790" stroke-width=".22458" aria-label="SETUP"> + <path id="path1424" d="m70.306 86.538q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.06433-0.46788-0.10527-0.29827-0.04094-0.61409-0.11697-0.30997-0.08188-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.09942-0.16376 0.09943-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.07018 0.69012 0.15791 0.54391 0.08188 0.87728 0.25733 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1426" d="m74.16 78.209h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1428" d="m85.009 78.209v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1430" d="m85.752 78.209h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67843 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29243-0.25733 0.38015-0.67843 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052-0.95331 0-1.6434-0.52052-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1432" d="m93.607 79.613v2.3862h0.36261q0.50297 0 0.73106-0.04679 0.3977-0.08188 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.04679-0.73106-0.04679zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.08773 0.83049 0.19885 1.351 0.87143 0.52637 0.66673 0.52637 1.6376t-0.52637 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.08773-1.234 0.08773h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1094" fill="#e5b62f" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1413" d="m102.96 78.209v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" 
style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1415" d="m105.19 80.818q-0.0877 0.47958-0.0877 1.5089t0.0877 1.5089q0.10528 0.5673 0.38601 0.93576 0.28657 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45619 0-0.74276 0.36846-0.28073 0.36261-0.38601 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411t0.16376-2.0411q0.23394-0.94746 0.88312-1.5557 0.64919-0.61409 1.6668-0.61409t1.6668 0.61409q0.64919 0.60824 0.88313 1.5557 0.16375 0.64918 0.16375 2.0411t-0.16375 2.0411q-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1417" d="m111.18 80.818q-0.0877 0.47958-0.0877 1.5089t0.0877 1.5089q0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261t0.73691-0.36261q0.28658-0.36846 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846t-0.74276 0.36846q-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411t0.16376-2.0411q0.23394-0.94746 0.88313-1.5557 0.64918-0.61409 1.6668-0.61409t1.6668 0.61409q0.64919 0.60824 0.88313 1.5557 0.16376 0.64918 0.16376 2.0411t-0.16376 2.0411q-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64919-0.61409-0.88313-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1419" d="m116.04 86.444v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1421" d="m124.15 86.538q-0.95916 0-2.164-0.18715v-1.427q1.4095 0.21055 2.164 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.06433-0.46789-0.10527-0.29827-0.04094-0.61409-0.11697-0.30997-0.08188-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78954 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22225 0-0.39185 0.09942-0.16376 0.09943-0.25149 0.25733-0.0877 0.15791-0.12867 0.32167-0.0351 0.16376-0.0351 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.07018 0.69013 0.15791 0.54391 0.08188 0.87727 0.25733 0.46788 0.23979 0.77201 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect830-6" d="m45.345 102.91h82.571c0.28796 0 0.51978 0.23183 0.51978 0.51978v22.171c0 0.28796-0.23182 0.51978-0.51978 0.51978h-82.571c-0.28796 0-0.51978-0.23182-0.51978-0.51978v-22.171c0-0.28795 0.23182-0.51978 0.51978-0.51978z" fill="#336790" style="paint-order:markers fill stroke"/> + <path id="rect1168" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m645.59 150.78c-2.1498-0.02041-6.2168 0.06055-6.2168 0.06055-0.19725 0-0.35547 0.16549-0.35547 0.37109v2.5254h-3.7051v-2.5254c0-0.2056-0.15821-0.37109-0.35547-0.37109h-4.416c-0.19725 0-0.35742 0.16549-0.35742 0.37109v9.3535c0 0.20561 0.16001 0.37109 0.35742 0.37109h4.416c0.19726 0 0.35547-0.16548 
0.35547-0.37109v-2.5234h3.7051v2.5234c0 0.20561 0.15825 0.37109 0.35547 0.37109h2.2363v11.906c-1.2817 0.50867-2.1856 1.7516-2.1856 3.2188v18.037c0 1.9216 1.5471 3.4688 3.4688 3.4688s3.4688-1.5471 3.4688-3.4688v-18.037c0-1.4665-0.90278-2.7096-2.1836-3.2188v-11.906h2.4199c0.38561-0.3394 0.75677-1.7011 2.8164-2.7422 5.5309-1.9 7.9328 2.808 7.8906 2.1582-0.04505-0.69373-0.35053-5.0458-5.1797-7.5977-2.1039-0.94609-4.5788-1.8754-5.2461-1.9141-0.06886-0.03628-0.57246-0.05374-1.2891-0.06055z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".82412" style="paint-order:markers fill stroke"/> + <path id="path1176" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m650.43 136.31c-1.8975 0.03152-3.3994 0.81663-4.3105 1.4941-1.2149 0.90323-1.6057 1.4193-3.4434 1.4238-1.8376 0.00378-2.2279-0.51106-3.4473-1.4082-1.2194-0.89719-3.4899-1.9778-6.3379-1.207-2.848 0.77069-4.2647 2.8506-4.8652 4.2402-0.60051 1.3896-0.67842 2.0304-2.2676 2.9531-1.5892 0.92281-2.1868 0.6751-3.6914 0.50782-1.5046-0.16744-4.0146 0.02853-6.0957 2.1191-2.0811 2.0914-2.2685 4.6056-2.0938 6.1094 0.17473 1.5038 0.42851 2.0977-0.48633 3.6914-0.91487 1.5937-1.5548 1.6758-2.9414 2.2832-1.3867 0.60741-3.4622 2.031-4.2188 4.8828-0.75654 2.8518 0.34098 5.1171 1.2441 6.332 0.90319 1.2149 1.4192 1.6057 1.4238 3.4434 0.00454 1.8376-0.51104 2.2279-1.4082 3.4473-0.89718 1.2194-1.9797 3.4899-1.209 6.3379 0.77071 2.848 2.8506 4.2647 4.2402 4.8652 1.3896 0.6005 2.0323 0.67844 2.9551 2.2676 0.92276 1.5892 0.67316 2.1868 0.50586 3.6914-0.16728 1.5046 0.0297 4.0146 2.1211 6.0957 2.0915 2.081 4.6037 2.2627 6.1074 2.0879 1.5037-0.17499 2.0996-0.42268 3.6934 0.49219 1.5937 0.91484 1.6738 1.5548 2.2812 2.9414 0.60742 1.3866 2.031 3.4622 4.8828 4.2188 2.8518 0.75654 6.3494-0.68758 6.334-1.2422-0.01587-0.55461-2.3574 0.39899-4.7051-1.0781-2.3477-1.4772-2.1797-2.3888-2.6973-3.5703-0.51759-1.1816-1.4424-3.4001-4.0254-4.8828-2.5829-1.4827-4.9648-1.1626-6.2461-1.0137-1.2813 0.14891-1.3671 0.40955-2.6894-0.90625-1.3224-1.3158-1.0586-1.3996-0.91602-2.6816 0.14257-1.2821 0.44867-3.6647-1.0469-6.2402-1.4956-2.5756-3.7163-3.4922-4.9004-4.0039-1.1841-0.51171-1.3857-0.32447-1.873-2.125-0.4873-1.8008-0.22347-1.7481 0.54102-2.7871 0.76447-1.039 2.2242-2.9456 2.2168-5.9238-0.00718-2.9783-1.4764-4.8828-2.2461-5.918-0.76963-1.0352-1.037-0.97811-0.55859-2.7812 0.47833-1.8031 0.68366-1.6172 1.8652-2.1348 1.1816-0.51756 3.4001-1.4444 4.8828-4.0273 1.4827-2.583 1.1626-4.9628 1.0137-6.2441-0.14883-1.2813-0.4076-1.3671 0.90821-2.6894s1.3976-1.0644 2.6797-0.92188c1.282 0.14249 3.6647 0.45455 6.2402-1.041 2.5755-1.4955 3.4864-3.7163 3.998-4.9004 0.5117-1.1842 0.33013-1.3877 2.1309-1.875 1.8007-0.48734 1.7481-0.22152 2.7871 0.54296 1.039 0.76449 2.9456 2.2242 5.9238 2.2168 2.9783-0.00756 4.8789-1.4765 5.9141-2.2461 1.0352-0.76967 0.97813-1.0369 2.7812-0.5586 1.8031 0.47834 1.6211 0.68368 2.1387 1.8652 0.51758 1.1816 1.4444 3.4001 4.0273 4.8828s4.9648 1.1607 6.2461 1.0117c1.2813-0.14891 1.3651-0.4076 2.6875 0.9082 1.3224 1.3158 1.0605 1.3996 0.91796 2.6816-0.14252 1.2821-0.45062 3.6627 1.0449 6.2383 1.4955 2.5755 3.7162 3.4883 4.9004 4 1.1841 0.51171 1.3877 0.32819 1.875 2.1289 0.4873 1.8007 0.22346 1.7501-0.54102 2.7891-0.76448 1.039-2.2262 2.9456-2.2188 5.9238 0.00756 2.9783 1.4765 4.8769 2.2461 5.9121 0.76965 1.0352 1.0369 0.97811 0.5586 2.7812-0.47834 1.8031-0.68366 1.6231-1.8652 2.1406-1.1816 0.51756-3.4001 1.4444-4.8828 4.0273-1.4827 2.583-1.1606 4.9628-1.0117 6.2441s0.40761 1.3671-0.9082 2.6894-1.3996 1.0585-2.6816 0.91602c-1.2821-0.14249-3.6627-0.44865-6.2383 1.0469-2.5756 
1.4956-3.4922 3.7163-4.0039 4.9004-0.42101 0.9743-0.43101 1.288-1.4023 1.6387-0.14024-0.0769-0.29879-0.15063-0.49024-0.22266-1.6607-0.62483-1.8791 0.22103-3.2422 1.2363-1.2238 0.91157-2.0954 1.0256-2.0469 2.2246-0.11719-0.01206-0.22826-0.01416-0.3164-0.00391-0.27829 0.03402-0.38865 0.07753-0.6836-0.0918-0.29495-0.16932-0.30944-0.28633-0.42187-0.54296-0.11241-0.25663-0.3746-0.64126-0.90235-0.78125-0.52779-0.13985-1.1767 0.12766-1.1738 0.23046 0.00294 0.10243 0.43663-0.07441 0.8711 0.19922 0.43447 0.27326 0.40421 0.44148 0.5 0.66016 0.09573 0.21883 0.26613 0.62986 0.74414 0.9043 0.47801 0.27439 0.91911 0.21509 1.1562 0.1875 0.17999-0.02009 0.25507-0.01352 0.37695 0.07812 0.37226 1.1966 1.2449 1.0814 2.752 1.6484 1.6608 0.62483 2.1701 1.3747 3.5332 0.35937 0.99763-0.74309 0.89468-1.2829 0.65429-2.1582 2.2192-0.92808 3.3866-2.687 3.916-3.9121 0.60051-1.3897 0.67842-2.0323 2.2676-2.9551s2.1868-0.67314 3.6914-0.50586c1.5046 0.16744 4.0146-0.03044 6.0957-2.1211 2.0811-2.0914 2.2685-4.6037 2.0938-6.1074s-0.42853-2.0996 0.48633-3.6934c0.91486-1.5938 1.5548-1.6738 2.9414-2.2812 1.3866-0.60741 3.4622-2.031 4.2188-4.8828 0.75654-2.8518-0.34091-5.1191-1.2441-6.334-0.90322-1.2149-1.4193-1.6038-1.4238-3.4414-0.00453-1.8376 0.51104-2.2279 1.4082-3.4473 0.89716-1.2194 1.9797-3.4919 1.209-6.3398-0.77071-2.848-2.8506-4.2628-4.2402-4.8633-1.3896-0.60053-2.0323-0.68036-2.9551-2.2695-0.92278-1.5891-0.67315-2.1849-0.50586-3.6894 0.16728-1.5046-0.02971-4.0166-2.1211-6.0977-2.0914-2.0811-4.6037-2.2666-6.1074-2.0918-1.5037 0.17499-2.0996 0.42659-3.6934-0.48828-1.5937-0.91488-1.6738-1.5548-2.2812-2.9414-0.60741-1.3867-2.033-3.4602-4.8848-4.2168-0.71295-0.18912-1.389-0.26246-2.0215-0.25195z" fill="#fff" style="paint-order:markers fill stroke"/> + <g id="text1188" fill="#fff" stroke-width=".22458" aria-label="SETUP"> + <path id="path1385" d="m70.306 118.72q-0.95916 0-2.1639-0.18716v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10528-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25148-1.0995-0.65504-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.1639 0.14622v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32166-0.03509 0.16376-0.03509 0.32167 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23978 0.772 0.61994 0.30412 0.38015 0.42109 0.76615 0.11697 0.386 0.11697 0.80125 0 1.1346-0.70182 1.8247-0.70182 0.69013-1.8364 0.69013z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1387" d="m74.16 110.39h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1389" d="m85.009 110.39v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1391" d="m85.752 110.39h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.4211 0.3743 0.67843 0.29242 0.25733 0.70767 0.25733t0.70182-0.25733q0.29243-0.25733 0.38015-0.67843 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052-0.95331 
0-1.6434-0.52052-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1393" d="m93.607 111.8v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52637 0.66673 0.52637 1.6376t-0.52637 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1192" fill="#e5b62f" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1396" d="m102.96 110.39v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1398" d="m105.19 113q-0.0877 0.47957-0.0877 1.5089t0.0877 1.5089q0.10528 0.5673 0.38601 0.93576 0.28657 0.3626 0.74276 0.3626 0.45618 0 0.73691-0.3626 0.28658-0.36846 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.56731-0.39185-0.92992-0.28073-0.36845-0.73691-0.36845-0.45619 0-0.74276 0.36845-0.28073 0.36261-0.38601 0.92992zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64919-0.6141 1.6668-0.6141t1.6668 0.6141q0.64919 0.60824 0.88313 1.5557 0.16375 0.64919 0.16375 2.0411 0 1.392-0.16375 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60825-1.6668 0.60825t-1.6668-0.60825q-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1400" d="m111.18 113q-0.0877 0.47957-0.0877 1.5089t0.0877 1.5089q0.10527 0.5673 0.386 0.93576 0.28658 0.3626 0.74276 0.3626t0.73691-0.3626q0.28658-0.36846 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.56731-0.39185-0.92992-0.28073-0.36845-0.73691-0.36845t-0.74276 0.36845q-0.28073 0.36261-0.386 0.92992zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88313-1.5557 0.64918-0.6141 1.6668-0.6141t1.6668 0.6141q0.64919 0.60824 0.88313 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60825-1.6668 0.60825t-1.6668-0.60825q-0.64919-0.61409-0.88313-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1402" d="m116.04 118.63v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1404" d="m124.15 118.72q-0.95916 0-2.164-0.18716v-1.427q1.4095 0.21054 2.164 0.21054 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46789-0.10528-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25148-1.0995-0.65504-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78954 0 2.1639 0.14622v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22225 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.0877 0.15791-0.12867 0.32166-0.0351 0.16376-0.0351 0.32167 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69013 0.15791 0.54391 0.0819 0.87727 0.25734 0.46788 0.23978 0.77201 
0.61994 0.30412 0.38015 0.42109 0.76615t0.11697 0.80125q0 1.1346-0.70182 1.8247-0.70182 0.69013-1.8364 0.69013z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect830-6-9-8" d="m45.345 135.09h82.571c0.28796 0 0.51978 0.23182 0.51978 0.51978v22.171c0 0.28796-0.23182 0.51978-0.51978 0.51978h-82.571c-0.28796 0-0.51978-0.23182-0.51978-0.51978v-22.171c0-0.28796 0.23182-0.51978 0.51978-0.51978z" fill="#fff" style="paint-order:markers fill stroke"/> + <path id="rect1222" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m645.59 272.42c-2.1498-0.02041-6.2168 0.06055-6.2168 0.06055-0.19725 0-0.35547 0.16744-0.35547 0.37305v2.5234h-3.7051v-2.5234c0-0.20561-0.15821-0.37305-0.35547-0.37305h-4.416c-0.19725 0-0.35742 0.16744-0.35742 0.37305v9.3535c0 0.20561 0.16001 0.36914 0.35742 0.36914h4.416c0.19726 0 0.35547-0.16353 0.35547-0.36914v-2.5254h3.7051v2.5254c0 0.20561 0.15825 0.36914 0.35547 0.36914h2.2363v11.906c-1.2817 0.50867-2.1856 1.7516-2.1856 3.2188v18.037c0 1.9217 1.5471 3.4688 3.4688 3.4688s3.4688-1.5471 3.4688-3.4688v-18.037c0-1.4665-0.90278-2.7096-2.1836-3.2188v-11.906h2.4199c0.38561-0.3394 0.75677-1.7011 2.8164-2.7422 5.5309-1.9 7.9328 2.808 7.8906 2.1582-0.04505-0.69373-0.35053-5.0458-5.1797-7.5977-2.1039-0.94609-4.5788-1.8754-5.2461-1.9141-0.06886-0.03628-0.57246-0.05374-1.2891-0.06055z" stroke="#000" stroke-width=".82412" style="paint-order:markers fill stroke"/> + <path id="path1230" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m650.43 257.96c-1.8975 0.03152-3.3994 0.81663-4.3105 1.4941-1.2149 0.90323-1.6057 1.4192-3.4434 1.4238-1.8376 0.00378-2.2279-0.51106-3.4473-1.4082-1.2194-0.89718-3.4899-1.9778-6.3379-1.207-2.848 0.77069-4.2647 2.8506-4.8652 4.2402-0.60051 1.3896-0.67842 2.0323-2.2676 2.9551-1.5892 0.92281-2.1868 0.67315-3.6914 0.50586-1.5046-0.16743-4.0146 0.03049-6.0957 2.1211-2.0811 2.0914-2.2685 4.6037-2.0938 6.1074 0.17473 1.5038 0.42851 2.0977-0.48633 3.6914-0.91487 1.5937-1.5548 1.6758-2.9414 2.2832-1.3867 0.60741-3.4622 2.031-4.2188 4.8828-0.75654 2.8518 0.34098 5.1191 1.2441 6.334 0.90319 1.2149 1.4192 1.6038 1.4238 3.4414 0.00454 1.8376-0.51104 2.2279-1.4082 3.4473-0.89718 1.2194-1.9797 3.4918-1.209 6.3398 0.77071 2.848 2.8506 4.2628 4.2402 4.8633 1.3896 0.60049 2.0323 0.67844 2.9551 2.2676 0.92276 1.5892 0.67316 2.1868 0.50586 3.6914-0.16728 1.5046 0.0297 4.0146 2.1211 6.0957 2.0915 2.081 4.6037 2.2627 6.1074 2.0879 1.5037-0.17499 2.0996-0.42073 3.6934 0.49414 1.5937 0.91484 1.6738 1.5528 2.2812 2.9395 0.60742 1.3866 2.031 3.4622 4.8828 4.2187 2.8518 0.75655 6.3494-0.68757 6.334-1.2422-0.01587-0.55461-2.3574 0.39899-4.7051-1.0781-2.3477-1.4772-2.1797-2.3888-2.6973-3.5703-0.51759-1.1816-1.4424-3.4001-4.0254-4.8828-2.5829-1.4827-4.9648-1.1626-6.2461-1.0137-1.2813 0.14891-1.3671 0.40955-2.6894-0.90625-1.3224-1.3158-1.0586-1.3996-0.91602-2.6816 0.14257-1.282 0.44867-3.6628-1.0469-6.2383-1.4956-2.5756-3.7163-3.4922-4.9004-4.0039-1.1841-0.51171-1.3857-0.32643-1.873-2.127-0.4873-1.8008-0.22347-1.7481 0.54102-2.7871 0.76447-1.039 2.2242-2.9456 2.2168-5.9238-0.00718-2.9783-1.4764-4.8828-2.2461-5.918-0.76963-1.0352-1.037-0.97811-0.55859-2.7812 0.47833-1.8031 0.68366-1.6172 1.8652-2.1348 1.1816-0.51757 3.4001-1.4444 4.8828-4.0273 1.4827-2.583 1.1626-4.9628 1.0137-6.2441-0.14883-1.2813-0.4076-1.3671 0.90821-2.6895 1.3158-1.3224 1.3976-1.0644 2.6797-0.92187 1.282 0.14249 3.6647 0.45454 6.2402-1.041 2.5755-1.4955 3.4864-3.7163 3.998-4.9004 0.5117-1.1842 0.33013-1.3877 
2.1309-1.875 1.8007-0.48733 1.7481-0.22151 2.7871 0.54297 1.039 0.76449 2.9456 2.2242 5.9238 2.2168 2.9783-0.00756 4.8789-1.4765 5.9141-2.2461 1.0352-0.76967 0.97813-1.0369 2.7812-0.5586 1.8031 0.47834 1.6211 0.68368 2.1387 1.8652 0.51758 1.1816 1.4444 3.4001 4.0273 4.8828s4.9648 1.1626 6.2461 1.0137c1.2813-0.14891 1.3651-0.40955 2.6875 0.90625 1.3224 1.3158 1.0605 1.3996 0.91796 2.6816-0.14252 1.282-0.45062 3.6627 1.0449 6.2383 1.4955 2.5755 3.7162 3.4883 4.9004 4 1.1841 0.51171 1.3877 0.33014 1.875 2.1309 0.4873 1.8007 0.22346 1.7481-0.54102 2.7871-0.76448 1.039-2.2262 2.9456-2.2188 5.9238 0.00756 2.9783 1.4765 4.8769 2.2461 5.9121 0.76965 1.0352 1.0369 0.98006 0.5586 2.7832-0.47834 1.8031-0.68366 1.6211-1.8652 2.1387-1.1816 0.51757-3.4001 1.4444-4.8828 4.0274-1.4827 2.583-1.1606 4.9628-1.0117 6.2441 0.14892 1.2813 0.40761 1.3671-0.9082 2.6894s-1.3996 1.0585-2.6816 0.91602c-1.2821-0.14249-3.6627-0.44865-6.2383 1.0469-2.5756 1.4956-3.4922 3.7163-4.0039 4.9004-0.42099 0.97425-0.43116 1.2881-1.4023 1.6387-0.14024-0.07689-0.29879-0.15062-0.49024-0.22265-1.6607-0.62483-1.8791 0.22103-3.2422 1.2363-1.2238 0.91157-2.0954 1.0256-2.0469 2.2246-0.11719-0.01207-0.22826-0.01416-0.3164-0.00391-0.27829 0.03402-0.38865 0.07948-0.6836-0.08984-0.29495-0.16933-0.30944-0.2883-0.42187-0.54493-0.11241-0.25663-0.3746-0.64125-0.90235-0.78125-0.52779-0.13984-1.1767 0.12767-1.1738 0.23047 0.00294 0.10243 0.43663-0.07442 0.8711 0.19922 0.43447 0.27326 0.40421 0.44147 0.5 0.66016 0.09573 0.21883 0.26613 0.62986 0.74414 0.90429 0.47801 0.2744 0.91911 0.21509 1.1562 0.1875 0.17999-0.02008 0.25507-0.01351 0.37695 0.07813 0.37226 1.1966 1.2449 1.0814 2.752 1.6484 1.6608 0.62483 2.1701 1.3766 3.5332 0.36133 0.99774-0.74318 0.89279-1.2827 0.65234-2.1582 2.2207-0.92783 3.3884-2.6886 3.918-3.9141 0.60051-1.3897 0.67842-2.0323 2.2676-2.9551s2.1868-0.67314 3.6914-0.50586c1.5046 0.16743 4.0146-0.03045 6.0957-2.1211 2.0811-2.0914 2.2685-4.6037 2.0938-6.1074-0.17477-1.5037-0.42853-2.0977 0.48633-3.6914 0.91486-1.5938 1.5548-1.6758 2.9414-2.2832 1.3866-0.60741 3.4622-2.031 4.2188-4.8828 0.75654-2.8518-0.34091-5.1191-1.2441-6.334-0.90322-1.2149-1.4193-1.6038-1.4238-3.4414-0.00453-1.8376 0.51104-2.2279 1.4082-3.4473 0.89716-1.2194 1.9797-3.4919 1.209-6.3398-0.77071-2.848-2.8506-4.2628-4.2402-4.8633-1.3896-0.60053-2.0323-0.68035-2.9551-2.2695-0.92278-1.5891-0.67315-2.1849-0.50586-3.6894 0.16728-1.5046-0.02971-4.0166-2.1211-6.0977-2.0914-2.0811-4.6037-2.2666-6.1074-2.0918-1.5037 0.17499-2.0996 0.42659-3.6934-0.48829-1.5937-0.91487-1.6738-1.5528-2.2812-2.9394-0.60741-1.3867-2.033-3.4622-4.8848-4.2188-0.71295-0.18913-1.389-0.26246-2.0215-0.25195z" style="paint-order:markers fill stroke"/> + <g id="text1242" stroke-width=".22458" aria-label="SETUP"> + <path id="path1372" d="m70.306 150.91q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21054 2.1639 0.21054 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28072-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15207-2.1639-0.15207-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61994 0.30412 0.38016 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247t-1.8364 0.69012z" 
style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1374" d="m74.16 142.58h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1376" d="m85.009 142.58v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1378" d="m85.752 142.58h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67843 0.29242 0.25733 0.70767 0.25733t0.70182-0.25733q0.29243-0.25734 0.38015-0.67843 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52051-1.6376 0.52051-0.95331 0-1.6434-0.52051-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1380" d="m93.607 143.98v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75445 0-0.44449-0.23979-0.75446-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87142 0.52637 0.66673 0.52637 1.6376 0 0.97085-0.52637 1.6434-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1246" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1361" d="m102.96 142.58v1.4036h-1.8715v6.8311h-1.4972v-6.8311h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1363" d="m105.19 145.19q-0.0877 0.47958-0.0877 1.5089t0.0877 1.5089q0.10528 0.56731 0.38601 0.93576 0.28657 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36845 0.39185-0.93576 0.0877-0.48542 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45619 0-0.74276 0.36846-0.28073 0.36261-0.38601 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64919-0.6141 1.6668-0.6141t1.6668 0.6141q0.64919 0.60824 0.88313 1.5557 0.16375 0.64918 0.16375 2.0411 0 1.3919-0.16375 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1365" d="m111.18 145.19q-0.0877 0.47958-0.0877 1.5089t0.0877 1.5089q0.10527 0.56731 0.386 0.93576 0.28658 0.36261 0.74276 0.36261t0.73691-0.36261q0.28658-0.36845 0.39185-0.93576 0.0877-0.48542 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.5673-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846t-0.74276 0.36846q-0.28073 0.36261-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64919-0.16376-2.0411 0-1.392 0.16376-2.0411 0.23394-0.94746 0.88313-1.5557 0.64918-0.6141 1.6668-0.6141t1.6668 0.6141q0.64919 0.60824 0.88313 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.3919-0.16376 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64919-0.61409-0.88313-1.5616z" 
style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1367" d="m116.04 150.81v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1369" d="m124.15 150.91q-0.95916 0-2.164-0.18715v-1.427q1.4095 0.21054 2.164 0.21054 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28072-0.41524-0.386-0.16961-0.0643-0.46789-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69013 1.661-0.69013 0.78954 0 2.1639 0.14621v1.4095q-1.4212-0.15207-2.1639-0.15207-0.22225 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.0877 0.15791-0.12867 0.32167-0.0351 0.16376-0.0351 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69013 0.15791 0.54391 0.0819 0.87727 0.25733 0.46788 0.23979 0.77201 0.61994 0.30412 0.38016 0.42109 0.76616t0.11697 0.80124q0 1.1346-0.70182 1.8247t-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <path id="rect1252" d="m45.345 167.28h82.571c0.28796 0 0.51978 0.23182 0.51978 0.51978v22.171c0 0.28796-0.23182 0.51978-0.51978 0.51978h-82.571c-0.28796 0-0.51978-0.23182-0.51978-0.51978v-22.171c0-0.28796 0.23182-0.51978 0.51978-0.51978z" style="paint-order:markers fill stroke"/> + <path id="rect1254" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m645.59 394.06c-2.1498-0.02041-6.2168 0.06055-6.2168 0.06055-0.19725 0-0.35547 0.16744-0.35547 0.37304v2.5234h-3.7051v-2.5234c0-0.2056-0.15821-0.37304-0.35547-0.37304h-4.416c-0.19725 0-0.35742 0.16744-0.35742 0.37304v9.3535c0 0.20561 0.16001 0.37109 0.35742 0.37109h4.416c0.19726 0 0.35547-0.16548 0.35547-0.37109v-2.5234h3.7051v2.5234c0 0.20561 0.15825 0.37109 0.35547 0.37109h2.2363v11.904c-1.2817 0.50867-2.1856 1.7516-2.1856 3.2188v18.037c0 1.9216 1.5471 3.4688 3.4688 3.4688s3.4688-1.5471 3.4688-3.4688v-18.037c0-1.4665-0.90278-2.7096-2.1836-3.2188v-11.904h2.4199c0.38561-0.3394 0.75677-1.7031 2.8164-2.7441 5.5309-1.9 7.9328 2.808 7.8906 2.1582-0.04505-0.69374-0.35053-5.0458-5.1797-7.5977-2.1039-0.94609-4.5788-1.8754-5.2461-1.9141-0.06886-0.03629-0.57246-0.05375-1.2891-0.06055z" fill="#fff" stroke="#fff" stroke-width=".82412" style="paint-order:markers fill stroke"/> + <path id="path1262" transform="matrix(.26458 0 0 .26458 -113.76 68.446)" d="m650.43 379.6c-1.8975 0.03152-3.3994 0.81663-4.3105 1.4941-1.2149 0.90323-1.6057 1.4212-3.4434 1.4258-1.8376 0.00378-2.2279-0.51301-3.4473-1.4102-1.2194-0.89718-3.4899-1.9778-6.3379-1.207-2.848 0.77068-4.2647 2.8506-4.8652 4.2402-0.60051 1.3896-0.67842 2.0323-2.2676 2.9551-1.5892 0.92281-2.1868 0.67314-3.6914 0.50586-1.5046-0.16743-4.0146 0.03049-6.0957 2.1211-2.0811 2.0914-2.2685 4.6037-2.0938 6.1074 0.17473 1.5038 0.42851 2.0996-0.48633 3.6934-0.91487 1.5937-1.5548 1.6738-2.9414 2.2812-1.3867 0.60741-3.4622 2.031-4.2188 4.8828-0.75654 2.8518 0.34098 5.1191 1.2441 6.334 0.90319 1.2149 1.4192 1.6038 1.4238 3.4414 0.00454 1.8376-0.51104 2.2279-1.4082 3.4473-0.89718 1.2194-1.9797 3.4918-1.209 6.3398 0.77071 2.848 2.8506 4.2628 4.2402 4.8633 1.3896 0.60049 2.0323 0.68039 2.9551 2.2695 0.92276 1.5892 0.67316 2.1849 0.50586 3.6895-0.16728 1.5046 0.0297 4.0166 2.1211 6.0976 2.0915 2.081 4.6037 2.2627 6.1074 2.0879 1.5037-0.17461 2.0996-0.42268 3.6934 0.49219 1.5937 0.91483 1.6738 1.5528 2.2812 2.9394 0.60742 1.3866 2.031 3.4622 
4.8828 4.2188 2.8518 0.75655 6.3494-0.68758 6.334-1.2422-0.01587-0.55461-2.3574 0.39899-4.7051-1.0781-2.3477-1.4772-2.1797-2.3888-2.6973-3.5703-0.51759-1.1816-1.4424-3.4001-4.0254-4.8828-2.5829-1.4827-4.9648-1.1626-6.2461-1.0137-1.2813 0.14892-1.3671 0.40956-2.6894-0.90625-1.3224-1.3158-1.0586-1.3996-0.91602-2.6816 0.14257-1.282 0.44867-3.6628-1.0469-6.2383-1.4956-2.5756-3.7163-3.4922-4.9004-4.0039-1.1841-0.51171-1.3857-0.32643-1.873-2.127-0.4873-1.8008-0.22347-1.7481 0.54102-2.7871 0.76447-1.039 2.2242-2.9456 2.2168-5.9238-0.00718-2.9783-1.4764-4.8828-2.2461-5.918-0.76963-1.0352-1.037-0.97812-0.55859-2.7812 0.47833-1.8031 0.68366-1.6172 1.8652-2.1348 1.1816-0.51757 3.4001-1.4444 4.8828-4.0273 1.4827-2.583 1.1626-4.9628 1.0137-6.2441-0.14883-1.2813-0.4076-1.3671 0.90821-2.6895s1.3976-1.0644 2.6797-0.92187c1.282 0.14249 3.6647 0.45454 6.2402-1.041 2.5755-1.4955 3.4864-3.7163 3.998-4.9004 0.5117-1.1842 0.33013-1.3877 2.1309-1.875 1.8007-0.48733 1.7481-0.22151 2.7871 0.54297 1.039 0.76449 2.9456 2.2242 5.9238 2.2168 2.9783-0.00756 4.8789-1.4765 5.9141-2.2461 1.0352-0.76966 0.97813-1.0369 2.7812-0.55859 1.8031 0.47834 1.6211 0.68368 2.1387 1.8652 0.51758 1.1816 1.4444 3.4001 4.0273 4.8828s4.9648 1.1626 6.2461 1.0137c1.2813-0.14891 1.3651-0.40956 2.6875 0.90625 1.3224 1.3158 1.0605 1.3996 0.91796 2.6816-0.14252 1.282-0.45062 3.6627 1.0449 6.2383 1.4955 2.5755 3.7162 3.4883 4.9004 4 1.1841 0.51171 1.3877 0.33014 1.875 2.1309 0.4873 1.8007 0.22346 1.7481-0.54102 2.7871-0.76448 1.039-2.2262 2.9456-2.2188 5.9238 0.00756 2.9783 1.4765 4.8769 2.2461 5.9121 0.76965 1.0352 1.0369 0.98006 0.5586 2.7832-0.47834 1.8031-0.68366 1.6211-1.8652 2.1387-1.1816 0.51757-3.4001 1.4444-4.8828 4.0274-1.4827 2.583-1.1606 4.9628-1.0117 6.2441 0.14892 1.2813 0.40761 1.3671-0.9082 2.6894s-1.3996 1.0605-2.6816 0.91797c-1.2821-0.14249-3.6627-0.4506-6.2383 1.0449-2.5756 1.4956-3.4922 3.7163-4.0039 4.9004-0.42128 0.97492-0.43101 1.2897-1.4043 1.6406-0.13992-0.07663-0.29747-0.15087-0.48828-0.22266-1.6607-0.62483-1.8791 0.21908-3.2422 1.2344-1.2233 0.91121-2.0946 1.027-2.0469 2.2246-0.11719-0.01206-0.22826-0.01415-0.3164-0.0039-0.27829 0.03401-0.38865 0.07948-0.6836-0.08985-0.29495-0.16932-0.30944-0.28829-0.42187-0.54492-0.11241-0.25663-0.3746-0.64125-0.90235-0.78125-0.52779-0.13984-1.1767 0.12767-1.1738 0.23047 0.00294 0.10243 0.43663-0.07442 0.8711 0.19922 0.43447 0.27326 0.40421 0.44343 0.5 0.66211 0.09573 0.21883 0.26613 0.62791 0.74414 0.90234 0.47801 0.2744 0.91911 0.21509 1.1562 0.1875 0.17999-0.02008 0.25507-0.01352 0.37695 0.07813 0.37226 1.1966 1.2449 1.0814 2.752 1.6484 1.6608 0.62483 2.1701 1.3766 3.5332 0.36133 0.99774-0.74318 0.89279-1.2827 0.65234-2.1582 2.2207-0.92783 3.3884-2.6886 3.918-3.9141 0.60051-1.3897 0.67842-2.0323 2.2676-2.9551s2.1868-0.67314 3.6914-0.50586c1.5046 0.16743 4.0146-0.03045 6.0957-2.1211 2.0811-2.0914 2.2685-4.6037 2.0938-6.1074-0.17477-1.5037-0.42853-2.0977 0.48633-3.6914 0.91486-1.5938 1.5548-1.6758 2.9414-2.2832 1.3866-0.6074 3.4622-2.031 4.2188-4.8828 0.75654-2.8518-0.34091-5.1191-1.2441-6.334-0.90322-1.2149-1.4193-1.6038-1.4238-3.4414-0.00453-1.8376 0.51104-2.2279 1.4082-3.4473 0.89716-1.2194 1.9797-3.4919 1.209-6.3398-0.77071-2.848-2.8506-4.2628-4.2402-4.8633-1.3896-0.60053-2.0323-0.6784-2.9551-2.2676-0.92278-1.5891-0.67315-2.1868-0.50586-3.6914 0.16728-1.5046-0.02971-4.0147-2.1211-6.0957-2.0914-2.0811-4.6037-2.2685-6.1074-2.0938-1.5037 0.17462-2.0996 
0.42659-3.6934-0.48828-1.5937-0.91487-1.6738-1.5528-2.2812-2.9394-0.60741-1.3867-2.033-3.4622-4.8848-4.2188-0.71295-0.18913-1.389-0.26246-2.0215-0.25195z" fill="#fff" style="paint-order:markers fill stroke"/> + <g id="text1274" fill="#fff" stroke-width=".22458" aria-label="SETUP"> + <path id="path1333" d="m70.306 183.09q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.08773 0.15791-0.12867 0.32167-0.03509 0.16376-0.03509 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25733 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1335" d="m74.16 174.76h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1337" d="m85.009 174.76v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1339" d="m85.752 174.76h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29243-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82464-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052-0.95331 0-1.6434-0.52052-0.68428-0.52051-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1341" d="m93.607 176.17v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446 0-0.44448-0.23979-0.75445-0.23394-0.30998-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52637 0.66673 0.52637 1.6376 0 0.97086-0.52637 1.6434-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1278" fill="#fff" stroke-width=".22458" aria-label="TOOLS"> + <path id="path1344" d="m102.96 174.76v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1346" d="m105.19 177.37q-0.0877 0.47958-0.0877 1.5089 0 1.0293 0.0877 1.5089 0.10528 0.56731 0.38601 0.93576 0.28657 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36845 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45619 0-0.74276 0.36846-0.28073 0.3626-0.38601 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88312-1.5557 0.64919-0.61409 
1.6668-0.61409t1.6668 0.61409q0.64919 0.60825 0.88313 1.5557 0.16375 0.64918 0.16375 2.0411 0 1.392-0.16375 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64918-0.6141-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1348" d="m111.18 177.37q-0.0877 0.47958-0.0877 1.5089 0 1.0293 0.0877 1.5089 0.10527 0.56731 0.386 0.93576 0.28658 0.36261 0.74276 0.36261t0.73691-0.36261q0.28658-0.36845 0.39185-0.93576 0.0877-0.48543 0.0877-1.5089 0-1.0293-0.0877-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846t-0.74276 0.36846q-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94746 0.88313-1.5557 0.64918-0.61409 1.6668-0.61409t1.6668 0.61409q0.64919 0.60825 0.88313 1.5557 0.16376 0.64918 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88313 1.5616-0.64918 0.60824-1.6668 0.60824t-1.6668-0.60824q-0.64919-0.6141-0.88313-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1350" d="m116.04 183v-8.2347h1.4972v6.831h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1352" d="m124.15 183.09q-0.95916 0-2.164-0.18715v-1.427q1.4095 0.21055 2.164 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46789-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78954 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22225 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25733-0.0877 0.15791-0.12867 0.32167-0.0351 0.16376-0.0351 0.32167 0 0.69012 0.46788 0.90067 0.15206 0.0702 0.69013 0.15791 0.54391 0.0819 0.87727 0.25733 0.46788 0.23979 0.77201 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </svg> diff --git a/docs/images/logo-inline-negative.svg b/docs/images/logo-inline-negative.svg index deed96e6..4bf63cfe 100644 --- a/docs/images/logo-inline-negative.svg +++ b/docs/images/logo-inline-negative.svg @@ -1,105 +1,35 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - width="83.810921mm" - height="23.41044mm" - viewBox="0 0 83.810921 23.41044" - version="1.1" - id="svg1021"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <rect - ry="0.51978147" - y="0.1" - x="0.1" - height="23.21044" - width="83.610924" - id="rect830" - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.48264033;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - 
id="layer1" - transform="translate(-2.6008009,-95.497324)"> - <g - id="g1751" - transform="matrix(0.45669594,0,0,0.45669594,-11.041899,60.847347)"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 
-0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 
72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - <g - id="g1747" - transform="translate(0,-23.386941)"> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1713" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1715" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1717" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1719" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1721" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1724" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1726" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 
-0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1728" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1730" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1732" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="83.811mm" height="23.41mm" version="1.1" viewBox="0 0 83.811 23.41" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata 
id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <rect id="rect830" x=".1" y=".1" width="83.611" height="23.21" ry=".51978" fill="#336790" style="paint-order:markers fill stroke"/> + <g id="layer1" transform="translate(-2.6008 -95.497)"> + <g id="g1751" transform="matrix(.4567 0 0 .4567 -11.042 60.847)"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 
0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#fff" style="paint-order:markers fill stroke"/> </g> + <g id="g1747" transform="translate(0 -23.387)" stroke-width=".22458"> + <g id="text1023" fill="#fff" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" 
style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 
0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> + </g> </svg> diff --git a/docs/images/logo-inline.svg b/docs/images/logo-inline.svg index 11ab7df7..6e45103d 100644 --- a/docs/images/logo-inline.svg +++ b/docs/images/logo-inline.svg @@ -1,97 +1,34 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - width="80.121315mm" - height="20mm" - viewBox="0 0 80.121315 20" - version="1.1" - id="svg1021"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - id="layer1" - transform="translate(-4.4456067,-97.202544)"> - <g - id="g1751" - transform="matrix(0.45669594,0,0,0.45669594,-11.041899,60.847347)"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 
-0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 
-0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - <g - id="g1747" - transform="translate(0,-23.386941)"> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1713" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 
-0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1715" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1717" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1719" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1721" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 
-1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1724" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1726" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1728" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1730" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1732" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="80.121mm" height="20mm" version="1.1" viewBox="0 0 80.121 20" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="layer1" transform="translate(-4.4456 -97.203)"> + <g id="g1751" transform="matrix(.4567 0 0 .4567 -11.042 60.847)"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 
0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 
1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#336790" style="paint-order:markers fill stroke"/> </g> + <g id="g1747" transform="translate(0 -23.387)" stroke-width=".22458"> + <g id="text1023" fill="#336790" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" 
d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> + </g> </svg> diff --git a/docs/images/logo-negative.svg b/docs/images/logo-negative.svg index 23a553d3..d2142045 100644 --- a/docs/images/logo-negative.svg +++ b/docs/images/logo-negative.svg @@ -1,106 +1,37 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - 
width="64.584335mm" - height="60.751602mm" - viewBox="0 0 64.584336 60.751602" - version="1.1" - id="svg1021"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <rect - ry="1.57368" - y="0.1" - x="0.1" - height="60.551601" - width="64.384338" - id="rect830" - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <g - id="layer1" - transform="translate(-22.929862,-76.826813)"> - <g - id="g1769"> - <g - id="g1751"> - <path - id="rect934" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path944" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 
-1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 
-0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - </g> - <g - id="g1747"> - <g - aria-label="SETUP" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1023"> - <path - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - id="path1713" /> - <path - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - id="path1715" /> - <path - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - id="path1717" /> - <path - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - id="path1719" /> - <path - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke-width:0.22458273" - id="path1721" /> - </g> - <g - aria-label="TOOLS" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1661"> - <path - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1724" /> - <path - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 
0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1726" /> - <path - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1728" /> - <path - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1730" /> - <path - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1732" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="64.584mm" height="60.752mm" version="1.1" viewBox="0 0 64.584 60.752" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <rect id="rect830" x=".1" y=".1" width="64.384" height="60.552" ry="1.5737" fill="#336790" style="paint-order:markers fill stroke"/> + <g id="layer1" transform="translate(-22.93 -76.827)"> + <g id="g1769"> + <g id="g1751"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 
1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#fff" style="paint-order:markers fill stroke"/> + </g> + <g id="g1747" stroke-width=".22458"> + <g id="text1023" fill="#fff" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 
0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 
1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </g> + </g> </svg> diff --git a/docs/images/logo-over-white.svg b/docs/images/logo-over-white.svg index 3ae3968e..1ed01380 100644 --- a/docs/images/logo-over-white.svg +++ b/docs/images/logo-over-white.svg @@ -1,106 +1,37 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - width="64.584335mm" - height="60.751602mm" - viewBox="0 0 64.584335 60.751602" - version="1.1" - id="svg1021"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <rect - style="fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.75281364;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - id="rect1616" - width="64.384338" - height="60.551601" - x="0.1" - y="0.1" - ry="1.57368" /> - <g - id="layer1" - transform="translate(-22.929862,-76.826813)"> - <g - id="g1769"> - <g - id="g1751"> - <path - id="rect934" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 
c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" /> - <path - id="path944" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 
1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" 
/> - </g> - <g - id="g1747"> - <g - aria-label="SETUP" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1023"> - <path - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1713" /> - <path - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1715" /> - <path - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1717" /> - <path - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - 
id="path1719" /> - <path - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1721" /> - </g> - <g - aria-label="TOOLS" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1661"> - <path - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1724" /> - <path - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1726" /> - <path - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 
0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1728" /> - <path - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1730" /> - <path - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1732" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="64.584mm" height="60.752mm" version="1.1" viewBox="0 0 64.584 60.752" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <rect id="rect1616" x=".1" y=".1" width="64.384" height="60.552" ry="1.5737" fill="#fff" style="paint-order:markers fill stroke"/> + <g id="layer1" transform="translate(-22.93 -76.827)"> + <g id="g1769"> + <g id="g1751"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 
0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 
0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#336790" style="paint-order:markers fill stroke"/> + </g> + <g id="g1747" stroke-width=".22458"> + <g id="text1023" fill="#336790" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 
0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 
0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </g> + </g> </svg> diff --git a/docs/images/logo-symbol-only.svg b/docs/images/logo-symbol-only.svg index 7d839c65..2bbf2d58 100644 --- a/docs/images/logo-symbol-only.svg +++ b/docs/images/logo-symbol-only.svg @@ -1,46 +1,20 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 50.799998 50.799999" - height="192" - width="192"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="matrix(1.1479242,0,0,1.1479242,-37.990707,-97.660395)" - id="layer1"> - <g - id="g1769"> - <g - id="g1751" - transform="translate(1.4742777e-4,5.70132)"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 
-3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 -1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 
-0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 -0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="192" height="192" version="1.1" viewBox="0 0 50.8 50.8" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="layer1" transform="matrix(1.1479 0 0 1.1479 -37.991 -97.66)"> + <g id="g1769"> + <g id="g1751" transform="translate(.00014743 5.7013)"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 
0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 
0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#336790" style="paint-order:markers fill stroke"/> + </g> </g> + </g> </svg> diff --git a/docs/images/logo-text-only.svg b/docs/images/logo-text-only.svg index a59731d4..2e92580d 100644 --- a/docs/images/logo-text-only.svg +++ b/docs/images/logo-text-only.svg @@ -1,85 +1,30 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 59.489777 9.2218504" - height="9.2218504mm" - width="59.489777mm"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="translate(-25.477144,-102.59162)" - id="layer1"> - <g - transform="translate(0,-23.386941)" - id="g1747"> - <g - aria-label="SETUP" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1023"> - <path - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1713" /> - <path - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1715" /> - <path - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1717" /> - <path - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1719" /> - <path - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - id="path1721" /> - </g> - <g - aria-label="TOOLS" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - id="text1661"> - <path - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1724" /> - <path - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1726" /> - <path - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1728" /> - <path - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1730" /> - <path - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 
1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - id="path1732" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="59.49mm" height="9.2219mm" version="1.1" viewBox="0 0 59.49 9.2219" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="layer1" transform="translate(-25.477 -102.59)"> + <g id="g1747" transform="translate(0 -23.387)" stroke-width=".22458"> + <g id="text1023" fill="#336790" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 
0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 
0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> </g> + </g> </svg> diff --git a/docs/images/logo.svg b/docs/images/logo.svg index 103d294f..7c793a08 100644 --- a/docs/images/logo.svg +++ b/docs/images/logo.svg @@ -1,98 +1,36 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<svg - xmlns:dc="http://purl.org/dc/elements/1.1/" - xmlns:cc="http://creativecommons.org/ns#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:svg="http://www.w3.org/2000/svg" - xmlns="http://www.w3.org/2000/svg" - id="svg1021" - version="1.1" - viewBox="0 0 60.536667 56.932983" - height="56.932983mm" - width="60.536667mm"> - <defs - id="defs1015" /> - <metadata - id="metadata1018"> - <rdf:RDF> - <cc:Work - rdf:about=""> - <dc:format>image/svg+xml</dc:format> - <dc:type - rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> - <dc:title></dc:title> - </cc:Work> - </rdf:RDF> - </metadata> - <g - transform="translate(-24.953698,-78.736052)" - id="layer1"> - <g - id="g1769"> - <g - id="g1751"> - <path - style="fill:#e5b62f;fill-opacity:1;fill-rule:nonzero;stroke:#e5b62f;stroke-width:0.4774465;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 56.863889,87.985612 c -1.245468,-0.01183 -3.601847,0.03564 -3.601847,0.03564 -0.114287,0 -0.20619,0.09589 -0.20619,0.214976 v 1.46244 H 50.90973 v -1.46244 c 0,-0.1191 -0.0919,-0.214976 -0.206189,-0.214976 h -2.559017 c -0.114286,0 -0.206705,0.09589 -0.206705,0.214976 v 5.418788 c 0,0.1191 0.09235,0.214977 0.206705,0.214977 h 2.559017 c 0.114287,0 0.206189,-0.09587 0.206189,-0.214977 v -1.461923 h 2.146122 v 1.461923 c 0,0.1191 0.09191,0.214977 0.20619,0.214977 h 1.295011 v 6.895187 c -0.742529,0.29471 -1.265555,1.01661 -1.265555,1.86655 v 10.45001 c 0,1.11329 0.895885,2.00918 2.00918,2.00918 1.113292,0 2.009695,-0.89589 2.009695,-2.00918 v -10.45001 c 0,-0.84994 -0.523022,-1.57184 -1.265555,-1.86655 v -6.895187 h 1.402499 c 0.223398,-0.196684 0.438705,-0.985378 1.631942,-1.588534 3.204289,-1.100736 4.595703,1.625992 4.571295,1.249532 -0.02611,-0.401913 -0.203636,-2.922341 -3.001367,-4.400762 -1.218895,-0.548108 -2.652008,-1.086577 -3.038575,-1.108977 -0.03989,-0.02102 -0.331568,-0.03172 -0.746723,-0.03564 z" - id="rect934" /> - <path - style="fill:#336790;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.19989915;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:markers fill stroke" - d="m 59.665281,79.605246 c -1.099312,0.01828 -1.969648,0.473642 -2.497521,0.866095 -0.703832,0.523277 -0.929574,0.822626 -1.994194,0.825277 -1.06462,0.0027 -1.290873,-0.296205 -1.997295,-0.815972 -0.706421,-0.519766 -2.022679,-1.14672 -3.672644,-0.700215 -1.649965,0.446506 -2.470018,1.651613 -2.817916,2.456698 -0.347897,0.80508 -0.393465,1.177432 -1.314132,1.712036 -0.920666,0.534609 -1.266695,0.389926 -2.13837,0.293006 -0.871661,-0.09692 -2.325918,0.0172 -3.531568,1.228868 -1.205665,1.211665 -1.314089,2.667116 -1.212846,3.538286 0.101228,0.871175 0.247854,1.216089 -0.282153,2.139404 -0.530022,0.923314 -0.900427,0.969986 
-1.703772,1.321884 -0.803345,0.351898 -2.005999,1.177126 -2.444296,2.829281 -0.438296,1.652161 0.197124,2.965197 0.72037,3.66903 0.523262,0.703833 0.822625,0.929572 0.825273,1.994196 0.0026,1.06462 -0.296213,1.29035 -0.815971,1.99677 -0.519773,0.70643 -1.146203,2.02269 -0.699699,3.67265 0.446504,1.64996 1.651089,2.47053 2.456181,2.81843 0.805077,0.3479 1.17743,0.39347 1.71204,1.31414 0.534596,0.92066 0.389929,1.26618 0.293006,2.13785 -0.09692,0.87166 0.01712,2.32643 1.228865,3.53208 1.211669,1.20565 2.667112,1.311 3.538286,1.20975 0.871174,-0.10126 1.21609,-0.24477 2.139405,0.28525 0.923314,0.53002 0.96998,0.90043 1.321882,1.70377 0.351903,0.80334 1.177131,2.00548 2.829285,2.44378 1.652155,0.43829 3.67799,-0.39906 3.669028,-0.72037 -0.0091,-0.3213 -1.365829,0.23152 -2.725932,-0.62425 -1.360103,-0.85577 -1.262319,-1.38407 -1.562179,-2.06861 -0.299861,-0.68453 -0.836256,-1.96976 -2.332674,-2.82876 -1.496404,-0.859 -2.876052,-0.67281 -3.618384,-0.58653 -0.742333,0.0863 -0.791945,0.23675 -1.558045,-0.52555 -0.7661,-0.76231 -0.613297,-0.81116 -0.530717,-1.55391 0.08259,-0.74275 0.260268,-2.12213 -0.606165,-3.61425 -0.866433,-1.49213 -2.153087,-2.02279 -2.839103,-2.31924 -0.686016,-0.29645 -0.803407,-0.18822 -1.085722,-1.23145 -0.282315,-1.04323 -0.129234,-1.01345 0.313676,-1.61541 0.442894,-0.60195 1.288974,-1.7064 1.284676,-3.43183 -0.0042,-1.725427 -0.855847,-2.828991 -1.30173,-3.428727 -0.445882,-0.599742 -0.600626,-0.566654 -0.323494,-1.611273 0.277117,-1.044619 0.396537,-0.936759 1.081071,-1.23662 0.684535,-0.299854 1.969763,-0.836259 2.828768,-2.332673 0.85899,-1.496409 0.672807,-2.876051 0.586528,-3.618385 -0.08624,-0.742328 -0.23624,-0.791936 0.526065,-1.558041 0.762306,-0.766106 0.810639,-0.616402 1.553394,-0.533819 0.74274,0.08258 2.122129,0.263369 3.61425,-0.603064 1.492121,-0.866434 2.020204,-2.153086 2.316655,-2.839103 0.29645,-0.686018 0.191317,-0.803408 1.234549,-1.085722 1.043233,-0.282314 1.012934,-0.129212 1.614888,0.313676 0.601956,0.442893 1.706409,1.288969 3.431833,1.284673 1.72543,-0.0043 2.825891,-0.855327 3.42563,-1.301209 0.599739,-0.445887 0.567168,-0.601134 1.611789,-0.324013 1.044619,0.27712 0.939345,0.396538 1.2392,1.081071 0.299857,0.684534 0.836779,1.969772 2.333191,2.828771 1.496414,0.858999 2.875536,0.67332 3.617867,0.587044 0.742334,-0.08628 0.791938,-0.236759 1.558044,0.525546 0.766107,0.762305 0.613818,0.810644 0.531236,1.553397 -0.08258,0.742748 -0.260784,2.122126 0.605648,3.614245 0.86643,1.492124 2.153086,2.020213 2.839103,2.316659 0.686017,0.296447 0.803923,0.191318 1.086239,1.23455 0.282313,1.043226 0.128696,1.012939 -0.314193,1.614885 -0.442896,0.601952 -1.288968,1.706403 -1.284676,3.431832 0.0043,1.72543 0.855845,2.82589 1.301729,3.42563 0.445883,0.59974 0.600615,0.56717 0.323496,1.61179 -0.277125,1.04462 -0.396539,0.93986 -1.081072,1.23972 -0.684536,0.29985 -1.96977,0.83626 -2.828769,2.33267 -0.858996,1.49642 -0.672804,2.87554 -0.586528,3.61787 0.08628,0.74233 0.236756,0.79194 -0.525548,1.55804 -0.762307,0.76611 -0.811159,0.61382 -1.553911,0.53124 -0.742751,-0.0826 -2.122128,-0.26027 -3.614251,0.60616 -1.492118,0.86644 -2.023308,2.15309 -2.319753,2.8391 -0.246329,0.57005 -0.222236,0.74776 -0.802536,0.95343 -0.08286,-0.046 -0.17925,-0.0898 -0.293521,-0.1328 -0.962139,-0.36199 -1.088237,0.12751 -1.877923,0.71571 -0.710128,0.52895 -1.218404,0.5911 -1.188558,1.28882 -0.06706,-0.007 -0.130334,-0.008 -0.180867,-0.002 -0.161229,0.0187 -0.224964,0.0454 -0.395841,-0.0527 -0.170875,-0.0981 -0.179305,-0.16656 -0.244429,-0.31522 -0.06513,-0.14868 
-0.217729,-0.37158 -0.523484,-0.45269 -0.305771,-0.0811 -0.680696,0.0739 -0.679026,0.13332 0.0016,0.0594 0.252655,-0.0426 0.504361,0.11576 0.251707,0.15837 0.233379,0.25624 0.28887,0.38292 0.05548,0.1267 0.155085,0.36451 0.432017,0.52349 0.276934,0.15896 0.531828,0.12449 0.669208,0.10851 0.104899,-0.0122 0.136234,-0.0302 0.20774,0.0243 0.210754,0.72266 0.721156,0.6429 1.605587,0.97565 0.962139,0.36199 1.256184,0.7975 2.045869,0.20929 0.578536,-0.43092 0.52042,-0.74207 0.380855,-1.25005 1.285693,-0.53767 1.960354,-1.55784 2.267045,-2.26756 0.347901,-0.80508 0.393468,-1.17692 1.314133,-1.71153 0.920665,-0.5346 1.26619,-0.39043 2.137855,-0.29351 0.871664,0.0969 2.326431,-0.0167 3.532084,-1.22836 1.205653,-1.21166 1.313579,-2.66762 1.212329,-3.5388 -0.101259,-0.87117 -0.247349,-1.21557 0.28267,-2.13889 0.530016,-0.92331 0.90043,-0.9705 1.703771,-1.3224 0.803344,-0.35189 2.005486,-1.17661 2.443779,-2.82876 0.438293,-1.65216 -0.197101,-2.9652 -0.72037,-3.66903 -0.523275,-0.70383 -0.822109,-0.92957 -0.824755,-1.9942 -0.0026,-1.064617 0.296204,-1.290862 0.81597,-1.997291 0.519766,-0.706424 1.146207,-2.022685 0.699698,-3.672642 C 75.90923,93.54616 74.704122,92.726098 73.899037,92.3782 73.093955,92.030302 72.72212,91.984737 72.187513,91.064067 c -0.534601,-0.920664 -0.390437,-1.266702 -0.293521,-2.138366 0.09692,-0.87167 -0.0167,-2.325916 -1.228349,-3.531571 -1.211665,-1.20565 -2.667629,-1.314095 -3.538802,-1.212843 -0.87117,0.101258 -1.215573,0.247861 -2.138887,-0.282152 -0.923314,-0.530018 -0.970502,-0.900435 -1.3224,-1.703774 -0.351899,-0.803295 -1.176611,-2.00595 -2.828769,-2.444247 -0.413039,-0.109574 -0.805067,-0.151817 -1.171504,-0.145726 z" - id="path944" /> - </g> - <g - id="g1747"> - <g - id="text1023" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#336790;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="SETUP"> - <path - id="path1713" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 28.181456,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543912,0.0819 0.877276,0.25734 0.467881,0.23979 0.772004,0.61994 0.304122,0.38015 0.421092,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - <path - id="path1715" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid 
Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 32.035622,126.47213 h 4.491654 v 1.40365 H 33.53284 v 1.98849 h 2.245827 v 1.40364 H 33.53284 v 2.03528 h 2.994436 v 1.40364 h -4.491654 z" /> - <path - id="path1717" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 42.884604,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1719" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 43.627365,126.47213 h 1.497218 v 5.24027 q 0,0.54976 0.04094,0.74861 0.08773,0.42109 0.374305,0.67842 0.292425,0.25734 0.707669,0.25734 0.415244,0 0.701821,-0.25734 0.292425,-0.25733 0.380153,-0.67842 0.04094,-0.19885 0.04094,-0.74861 v -5.24027 h 1.497218 v 5.24027 q 0,0.82464 -0.09358,1.22234 -0.198849,0.82463 -0.888973,1.34515 -0.684275,0.52052 -1.637582,0.52052 -0.953307,0 -1.643431,-0.52052 -0.684275,-0.52052 -0.883124,-1.34515 -0.09358,-0.3977 -0.09358,-1.22234 z" /> - <path - id="path1721" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#336790;fill-opacity:1;stroke-width:0.22458273" - d="m 51.48191,127.87578 v 2.38619 h 0.362607 q 0.502972,0 0.731064,-0.0468 0.397698,-0.0819 0.631638,-0.39185 0.239789,-0.30997 0.239789,-0.75446 0,-0.44449 -0.239789,-0.75446 -0.23394,-0.30997 -0.631638,-0.39185 -0.228092,-0.0468 -0.731064,-0.0468 z m -1.497218,-1.40365 h 1.848128 q 0.865579,0 1.234035,0.0877 0.830488,0.19885 1.351006,0.87143 0.526365,0.66673 0.526365,1.63758 0,0.97085 -0.526365,1.64343 -0.520518,0.66673 -1.351006,0.86558 -0.368456,0.0877 -1.234035,0.0877 h -0.35091 v 3.04122 h -1.497218 z" /> - </g> - <g - id="text1661" - style="font-style:normal;font-weight:normal;font-size:8.98330784px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#e5b62f;fill-opacity:1;stroke:none;stroke-width:0.22458273" - aria-label="TOOLS"> - <path - id="path1724" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 60.833675,126.47213 v 1.40365 h -1.871523 v 6.83105 h -1.497218 v -6.83105 h -1.871522 v -1.40365 z" /> - <path - id="path1726" - 
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 63.067804,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105274,0.5673 0.386002,0.93576 0.286577,0.36261 0.74276,0.36261 0.456184,0 0.736912,-0.36261 0.286577,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105273,-0.56731 -0.39185,-0.92991 -0.280728,-0.36846 -0.736912,-0.36846 -0.456183,0 -0.74276,0.36846 -0.280728,0.3626 -0.386002,0.92991 z m -1.421187,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.23394,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666824,-0.61409 1.017641,0 1.666825,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666825,0.60825 -1.01764,0 -1.666824,-0.60825 -0.649185,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1728" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 69.050828,129.08057 q -0.08773,0.47958 -0.08773,1.50891 0,1.02934 0.08773,1.50892 0.105273,0.5673 0.386001,0.93576 0.286577,0.36261 0.742761,0.36261 0.456183,0 0.736912,-0.36261 0.286576,-0.36846 0.39185,-0.93576 0.08773,-0.48543 0.08773,-1.50892 0,-1.02933 -0.08773,-1.50891 -0.105274,-0.56731 -0.39185,-0.92991 -0.280729,-0.36846 -0.736912,-0.36846 -0.456184,0 -0.742761,0.36846 -0.280728,0.3626 -0.386001,0.92991 z m -1.421188,3.55004 q -0.163758,-0.64918 -0.163758,-2.04113 0,-1.39194 0.163758,-2.04113 0.233941,-0.94745 0.883125,-1.5557 0.649184,-0.61409 1.666825,-0.61409 1.01764,0 1.666824,0.61409 0.649185,0.60825 0.883125,1.5557 0.163758,0.64919 0.163758,2.04113 0,1.39195 -0.163758,2.04113 -0.23394,0.94746 -0.883125,1.56155 -0.649184,0.60825 -1.666824,0.60825 -1.017641,0 -1.666825,-0.60825 -0.649184,-0.61409 -0.883125,-1.56155 z" /> - <path - id="path1730" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 73.916786,134.70683 v -8.2347 h 1.497218 v 6.83106 h 3.368741 v 1.40364 z" /> - <path - id="path1732" - style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:8.98330784px;font-family:Monoid;-inkscape-font-specification:'Monoid Bold';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;writing-mode:lr-tb;text-anchor:start;fill:#e5b62f;fill-opacity:1;stroke-width:0.22458273" - d="m 82.028666,134.80041 q -0.959155,0 -2.163948,-0.18715 v -1.42704 q 1.40949,0.21055 2.163948,0.21055 1.041034,0 1.041034,-1.11122 
0,-0.30997 -0.152061,-0.58485 -0.146213,-0.28073 -0.415244,-0.386 -0.169607,-0.0643 -0.467881,-0.10527 -0.298274,-0.0409 -0.614093,-0.11697 -0.309971,-0.0819 -0.596548,-0.25149 -1.099519,-0.65503 -1.099519,-2.07037 0,-1.01179 0.643336,-1.70192 0.649184,-0.69012 1.660976,-0.69012 0.789548,0 2.163948,0.14621 v 1.40949 q -1.421188,-0.15206 -2.163948,-0.15206 -0.222243,0 -0.39185,0.0994 -0.163758,0.0994 -0.251486,0.25734 -0.08773,0.15791 -0.128667,0.32167 -0.03509,0.16375 -0.03509,0.32166 0,0.69013 0.46788,0.90067 0.152062,0.0702 0.690124,0.15791 0.543911,0.0819 0.877276,0.25734 0.467881,0.23979 0.772003,0.61994 0.304123,0.38015 0.421093,0.76616 0.11697,0.386 0.11697,0.80124 0,1.13461 -0.701821,1.82474 -0.701821,0.69012 -1.836431,0.69012 z" /> - </g> - </g> +<?xml version="1.0" encoding="UTF-8"?> +<svg id="svg1021" width="60.537mm" height="56.933mm" version="1.1" viewBox="0 0 60.537 56.933" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <metadata id="metadata1018"> + <rdf:RDF> + <cc:Work rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/> + <dc:title/> + </cc:Work> + </rdf:RDF> + </metadata> + <g id="layer1" transform="translate(-24.954 -78.736)"> + <g id="g1769"> + <g id="g1751"> + <path id="rect934" d="m56.864 87.986c-1.2455-0.01183-3.6018 0.03564-3.6018 0.03564-0.11429 0-0.20619 0.09589-0.20619 0.21498v1.4624h-2.1461v-1.4624c0-0.1191-0.0919-0.21498-0.20619-0.21498h-2.559c-0.11429 0-0.2067 0.09589-0.2067 0.21498v5.4188c0 0.1191 0.09235 0.21498 0.2067 0.21498h2.559c0.11429 0 0.20619-0.09587 0.20619-0.21498v-1.4619h2.1461v1.4619c0 0.1191 0.09191 0.21498 0.20619 0.21498h1.295v6.8952c-0.74253 0.29471-1.2656 1.0166-1.2656 1.8666v10.45c0 1.1133 0.89588 2.0092 2.0092 2.0092 1.1133 0 2.0097-0.89589 2.0097-2.0092v-10.45c0-0.84994-0.52302-1.5718-1.2656-1.8666v-6.8952h1.4025c0.2234-0.19668 0.4387-0.98538 1.6319-1.5885 3.2043-1.1007 4.5957 1.626 4.5713 1.2495-0.02611-0.40191-0.20364-2.9223-3.0014-4.4008-1.2189-0.54811-2.652-1.0866-3.0386-1.109-0.03989-0.02102-0.33157-0.03172-0.74672-0.03564z" fill="#e5b62f" stroke="#e5b62f" stroke-width=".47745" style="paint-order:markers fill stroke"/> + <path id="path944" d="m59.665 79.605c-1.0993 0.01828-1.9696 0.47364-2.4975 0.8661-0.70383 0.52328-0.92957 0.82263-1.9942 0.82528-1.0646 0.0027-1.2909-0.2962-1.9973-0.81597-0.70642-0.51977-2.0227-1.1467-3.6726-0.70022-1.65 0.44651-2.47 1.6516-2.8179 2.4567-0.3479 0.80508-0.39346 1.1774-1.3141 1.712-0.92067 0.53461-1.2667 0.38993-2.1384 0.29301-0.87166-0.09692-2.3259 0.0172-3.5316 1.2289-1.2057 1.2117-1.3141 2.6671-1.2128 3.5383 0.10123 0.87118 0.24785 1.2161-0.28215 2.1394-0.53002 0.92331-0.90043 0.96999-1.7038 1.3219s-2.006 1.1771-2.4443 2.8293c-0.4383 1.6522 0.19712 2.9652 0.72037 3.669 0.52326 0.70383 0.82262 0.92957 0.82527 1.9942 0.0026 1.0646-0.29621 1.2904-0.81597 1.9968-0.51977 0.70643-1.1462 2.0227-0.6997 3.6726s1.6511 2.4705 2.4562 2.8184c0.80508 0.3479 1.1774 0.39347 1.712 1.3141 0.5346 0.92066 0.38993 1.2662 0.29301 2.1378-0.09692 0.87166 0.01712 2.3264 1.2289 3.5321 1.2117 1.2056 2.6671 1.311 3.5383 1.2098 0.87117-0.10126 1.2161-0.24477 2.1394 0.28525 0.92331 0.53002 0.96998 0.90043 1.3219 1.7038 0.3519 0.80334 1.1771 2.0055 2.8293 2.4438 1.6522 0.43829 3.678-0.39906 3.669-0.72037-0.0091-0.3213-1.3658 
0.23152-2.7259-0.62425s-1.2623-1.3841-1.5622-2.0686c-0.29986-0.68453-0.83626-1.9698-2.3327-2.8288-1.4964-0.859-2.8761-0.67281-3.6184-0.58653-0.74233 0.0863-0.79194 0.23675-1.558-0.52555-0.7661-0.76231-0.6133-0.81116-0.53072-1.5539 0.08259-0.74275 0.26027-2.1221-0.60616-3.6142-0.86643-1.4921-2.1531-2.0228-2.8391-2.3192s-0.80341-0.18822-1.0857-1.2314-0.12923-1.0134 0.31368-1.6154c0.44289-0.60195 1.289-1.7064 1.2847-3.4318-0.0042-1.7254-0.85585-2.829-1.3017-3.4287-0.44588-0.59974-0.60063-0.56665-0.32349-1.6113 0.27712-1.0446 0.39654-0.93676 1.0811-1.2366 0.68454-0.29985 1.9698-0.83626 2.8288-2.3327 0.85899-1.4964 0.67281-2.8761 0.58653-3.6184-0.08624-0.74233-0.23624-0.79194 0.52606-1.558 0.76231-0.76611 0.81064-0.6164 1.5534-0.53382 0.74274 0.08258 2.1221 0.26337 3.6142-0.60306 1.4921-0.86643 2.0202-2.1531 2.3167-2.8391 0.29645-0.68602 0.19132-0.80341 1.2345-1.0857 1.0432-0.28231 1.0129-0.12921 1.6149 0.31368 0.60196 0.44289 1.7064 1.289 3.4318 1.2847 1.7254-0.0043 2.8259-0.85533 3.4256-1.3012 0.59974-0.44589 0.56717-0.60113 1.6118-0.32401 1.0446 0.27712 0.93934 0.39654 1.2392 1.0811 0.29986 0.68453 0.83678 1.9698 2.3332 2.8288 1.4964 0.859 2.8755 0.67332 3.6179 0.58704 0.74233-0.08628 0.79194-0.23676 1.558 0.52555 0.76611 0.7623 0.61382 0.81064 0.53124 1.5534-0.08258 0.74275-0.26078 2.1221 0.60565 3.6142 0.86643 1.4921 2.1531 2.0202 2.8391 2.3167 0.68602 0.29645 0.80392 0.19132 1.0862 1.2346 0.28231 1.0432 0.1287 1.0129-0.31419 1.6149-0.4429 0.60195-1.289 1.7064-1.2847 3.4318 0.0043 1.7254 0.85584 2.8259 1.3017 3.4256 0.44588 0.59974 0.60062 0.56717 0.3235 1.6118-0.27712 1.0446-0.39654 0.93986-1.0811 1.2397-0.68454 0.29985-1.9698 0.83626-2.8288 2.3327-0.859 1.4964-0.6728 2.8755-0.58653 3.6179 0.08628 0.74233 0.23676 0.79194-0.52555 1.558-0.76231 0.76611-0.81116 0.61382-1.5539 0.53124-0.74275-0.0826-2.1221-0.26027-3.6143 0.60616-1.4921 0.86644-2.0233 2.1531-2.3198 2.8391-0.24633 0.57005-0.22224 0.74776-0.80254 0.95343-0.08286-0.046-0.17925-0.0898-0.29352-0.1328-0.96214-0.36199-1.0882 0.12751-1.8779 0.71571-0.71013 0.52895-1.2184 0.5911-1.1886 1.2888-0.06706-7e-3 -0.13033-8e-3 -0.18087-2e-3 -0.16123 0.0187-0.22496 0.0454-0.39584-0.0527-0.17088-0.0981-0.1793-0.16656-0.24443-0.31522-0.06513-0.14868-0.21773-0.37158-0.52348-0.45269-0.30577-0.0811-0.6807 0.0739-0.67903 0.13332 0.0016 0.0594 0.25266-0.0426 0.50436 0.11576 0.25171 0.15837 0.23338 0.25624 0.28887 0.38292 0.05548 0.1267 0.15508 0.36451 0.43202 0.52349 0.27693 0.15896 0.53183 0.12449 0.66921 0.10851 0.1049-0.0122 0.13623-0.0302 0.20774 0.0243 0.21075 0.72266 0.72116 0.6429 1.6056 0.97565 0.96214 0.36199 1.2562 0.7975 2.0459 0.20929 0.57854-0.43092 0.52042-0.74207 0.38086-1.25 1.2857-0.53767 1.9604-1.5578 2.267-2.2676 0.3479-0.80508 0.39347-1.1769 1.3141-1.7115 0.92066-0.5346 1.2662-0.39043 2.1379-0.29351 0.87166 0.0969 2.3264-0.0167 3.5321-1.2284s1.3136-2.6676 1.2123-3.5388c-0.10126-0.87117-0.24735-1.2156 0.28267-2.1389 0.53002-0.92331 0.90043-0.9705 1.7038-1.3224 0.80334-0.35189 2.0055-1.1766 2.4438-2.8288 0.43829-1.6522-0.1971-2.9652-0.72037-3.669-0.52328-0.70383-0.82211-0.92957-0.82476-1.9942-0.0026-1.0646 0.2962-1.2909 0.81597-1.9973 0.51977-0.70642 1.1462-2.0227 0.6997-3.6726-0.4465-1.65-1.6516-2.47-2.4567-2.8179-0.80508-0.3479-1.1769-0.39346-1.7115-1.3141-0.5346-0.92066-0.39044-1.2667-0.29352-2.1384 0.09692-0.87167-0.0167-2.3259-1.2283-3.5316-1.2117-1.2056-2.6676-1.3141-3.5388-1.2128-0.87117 0.10126-1.2156 
0.24786-2.1389-0.28215-0.92331-0.53002-0.9705-0.90044-1.3224-1.7038-0.3519-0.8033-1.1766-2.006-2.8288-2.4442-0.41304-0.10957-0.80507-0.15182-1.1715-0.14573z" fill="#336790" style="paint-order:markers fill stroke"/> + </g> + <g id="g1747" stroke-width=".22458"> + <g id="text1023" fill="#336790" aria-label="SETUP"> + <path id="path1713" d="m28.181 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1715" d="m32.036 126.47h4.4917v1.4036h-2.9944v1.9885h2.2458v1.4036h-2.2458v2.0353h2.9944v1.4036h-4.4917z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1717" d="m42.885 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1719" d="m43.627 126.47h1.4972v5.2403q0 0.54976 0.04094 0.74861 0.08773 0.42109 0.3743 0.67842 0.29242 0.25734 0.70767 0.25734t0.70182-0.25734q0.29242-0.25733 0.38015-0.67842 0.04094-0.19885 0.04094-0.74861v-5.2403h1.4972v5.2403q0 0.82464-0.09358 1.2223-0.19885 0.82463-0.88897 1.3452-0.68428 0.52052-1.6376 0.52052t-1.6434-0.52052q-0.68428-0.52052-0.88312-1.3452-0.09358-0.3977-0.09358-1.2223z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1721" d="m51.482 127.88v2.3862h0.36261q0.50297 0 0.73106-0.0468 0.3977-0.0819 0.63164-0.39185 0.23979-0.30997 0.23979-0.75446t-0.23979-0.75446q-0.23394-0.30997-0.63164-0.39185-0.22809-0.0468-0.73106-0.0468zm-1.4972-1.4036h1.8481q0.86558 0 1.234 0.0877 0.83049 0.19885 1.351 0.87143 0.52636 0.66673 0.52636 1.6376t-0.52636 1.6434q-0.52052 0.66673-1.351 0.86558-0.36846 0.0877-1.234 0.0877h-0.35091v3.0412h-1.4972z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> </g> + <g id="text1661" fill="#e5b62f" aria-label="TOOLS"> + <path id="path1724" d="m60.834 126.47v1.4036h-1.8715v6.831h-1.4972v-6.831h-1.8715v-1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1726" d="m63.068 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 
1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1728" d="m69.051 129.08q-0.08773 0.47958-0.08773 1.5089 0 1.0293 0.08773 1.5089 0.10527 0.5673 0.386 0.93576 0.28658 0.36261 0.74276 0.36261 0.45618 0 0.73691-0.36261 0.28658-0.36846 0.39185-0.93576 0.08773-0.48543 0.08773-1.5089 0-1.0293-0.08773-1.5089-0.10527-0.56731-0.39185-0.92991-0.28073-0.36846-0.73691-0.36846-0.45618 0-0.74276 0.36846-0.28073 0.3626-0.386 0.92991zm-1.4212 3.55q-0.16376-0.64918-0.16376-2.0411 0-1.3919 0.16376-2.0411 0.23394-0.94745 0.88312-1.5557 0.64918-0.61409 1.6668-0.61409 1.0176 0 1.6668 0.61409 0.64918 0.60825 0.88312 1.5557 0.16376 0.64919 0.16376 2.0411 0 1.392-0.16376 2.0411-0.23394 0.94746-0.88312 1.5616-0.64918 0.60825-1.6668 0.60825-1.0176 0-1.6668-0.60825-0.64918-0.61409-0.88312-1.5616z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1730" d="m73.917 134.71v-8.2347h1.4972v6.8311h3.3687v1.4036z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + <path id="path1732" d="m82.029 134.8q-0.95916 0-2.1639-0.18715v-1.427q1.4095 0.21055 2.1639 0.21055 1.041 0 1.041-1.1112 0-0.30997-0.15206-0.58485-0.14621-0.28073-0.41524-0.386-0.16961-0.0643-0.46788-0.10527-0.29827-0.0409-0.61409-0.11697-0.30997-0.0819-0.59655-0.25149-1.0995-0.65503-1.0995-2.0704 0-1.0118 0.64334-1.7019 0.64918-0.69012 1.661-0.69012 0.78955 0 2.1639 0.14621v1.4095q-1.4212-0.15206-2.1639-0.15206-0.22224 0-0.39185 0.0994-0.16376 0.0994-0.25149 0.25734-0.08773 0.15791-0.12867 0.32167-0.03509 0.16375-0.03509 0.32166 0 0.69013 0.46788 0.90067 0.15206 0.0702 0.69012 0.15791 0.54391 0.0819 0.87728 0.25734 0.46788 0.23979 0.772 0.61994 0.30412 0.38015 0.42109 0.76616 0.11697 0.386 0.11697 0.80124 0 1.1346-0.70182 1.8247-0.70182 0.69012-1.8364 0.69012z" style="font-feature-settings:normal;font-variant-caps:normal;font-variant-ligatures:normal;font-variant-numeric:normal"/> + </g> + </g> </g> + </g> </svg> diff --git a/docs/index.rst b/docs/index.rst index b886c8f8..0f52c360 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -26,6 +26,6 @@ designed to facilitate packaging Python projects. Development guide <development/index> Backward compatibility & deprecated practice <deprecated/index> Changelog <history> - Artwork <images/README> + artwork .. tidelift-referral-banner:: diff --git a/docs/pkg_resources.rst b/docs/pkg_resources.rst index c1158189..21ff6dc1 100644 --- a/docs/pkg_resources.rst +++ b/docs/pkg_resources.rst @@ -13,8 +13,8 @@ packages. Use of ``pkg_resources`` is discouraged in favor of `importlib.resources <https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_, `importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_, -and their backports (`resources <https://pypi.org/project/importlib_resources>`_, -`metadata <https://pypi.org/project/importlib_metadata>`_). +and their backports (:pypi:`importlib_resources`, +:pypi:`importlib_metadata`). Please consider using those libraries instead of pkg_resources. 
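As a purely illustrative sketch of the migration recommended in the ``pkg_resources`` hunk above (``my_package`` and ``data.json`` are hypothetical placeholder names, not part of this patch), code that used ``pkg_resources`` for resource access and version lookup can usually be rewritten against the standard-library modules roughly as follows:

.. code-block:: python

    # Hypothetical example: "my_package" and "data.json" are placeholders.
    # Before (discouraged):
    #     import pkg_resources
    #     raw = pkg_resources.resource_string("my_package", "data.json").decode("utf-8")
    #     pkg_version = pkg_resources.get_distribution("my_package").version

    # After, using the standard library (Python 3.9+ shown; the importlib_resources
    # and importlib_metadata backports expose the same API on older versions):
    from importlib.metadata import version
    from importlib.resources import files

    raw = files("my_package").joinpath("data.json").read_text(encoding="utf-8")
    pkg_version = version("my_package")

Accessing data through ``files()`` also keeps working when the package is imported from a zip archive, which is the main situation where ``__file__``-based paths break.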
diff --git a/docs/setuptools.rst b/docs/setuptools.rst index c5a89adc..aa638300 100644 --- a/docs/setuptools.rst +++ b/docs/setuptools.rst @@ -21,8 +21,9 @@ Feature Highlights: individually in setup.py * Automatically include all relevant files in your source distributions, - without needing to create a ``MANIFEST.in`` file, and without having to force - regeneration of the ``MANIFEST`` file when your source tree changes. + without needing to create a |MANIFEST.in|_ file, and without having to force + regeneration of the ``MANIFEST`` file when your source tree changes + [#manifest]_. * Automatically generate wrapper scripts or Windows (console and GUI) .exe files for any number of "main" functions in your project. (Note: this is not @@ -201,13 +202,27 @@ As a consequence, the resulting dictionary will include no such options. -Mailing List and Bug Tracker -============================ +Forum and Bug Tracker +===================== -Please use the `distutils-sig mailing list`_ for questions and discussion about +Please use `GitHub Discussions`_ for questions and discussion about setuptools, and the `setuptools bug tracker`_ ONLY for issues you have -confirmed via the list are actual bugs, and which you have reduced to a minimal +confirmed via the forum are actual bugs, and which you have reduced to a minimal set of steps to reproduce. -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ +.. _GitHub Discussions: https://github.com/pypa/setuptools/discussions .. _setuptools bug tracker: https://github.com/pypa/setuptools/ + + +---- + + +.. [#manifest] The default behaviour for ``setuptools`` will work well for pure + Python packages, or packages with simple C extensions (that don't require + any special C header). See :ref:`Controlling files in the distribution` and + :doc:`userguide/datafiles` for more information about complex scenarios, if + you want to include other types of files. + + +.. |MANIFEST.in| replace:: ``MANIFEST.in`` +.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst index 69cf36e6..9817e639 100644 --- a/docs/userguide/datafiles.rst +++ b/docs/userguide/datafiles.rst @@ -5,11 +5,11 @@ Data Files Support The distutils have traditionally allowed installation of "data files", which are placed in a platform-specific location. However, the most common use case for data files distributed with a package is for use *by* the package, usually -by including the data files in the package directory. +by including the data files **inside the package directory**. -Setuptools offers three ways to specify data files to be included in your -packages. First, you can simply use the ``include_package_data`` keyword, -e.g.:: +Setuptools offers three ways to specify this most common type of data files to +be included in your package's [#datafiles]_. +First, you can simply use the ``include_package_data`` keyword, e.g.:: from setuptools import setup, find_packages setup( @@ -18,9 +18,10 @@ e.g.:: ) This tells setuptools to install any data files it finds in your packages. -The data files must be specified via the distutils' ``MANIFEST.in`` file. +The data files must be specified via the |MANIFEST.in|_ file. (They can also be tracked by a revision control system, using an appropriate -plugin. See the section below on :ref:`Adding Support for Revision +plugin such as :pypi:`setuptools-scm` or :pypi:`setuptools-svn`. 
+See the section below on :ref:`Adding Support for Revision Control Systems` for information on how to write such plugins.) If you want finer-grained control over what files are included (for example, @@ -87,14 +88,13 @@ When building an ``sdist``, the datafiles are also drawn from the ``package_name.egg-info/SOURCES.txt`` file, so make sure that this is removed if the ``setup.py`` ``package_data`` list is updated before calling ``setup.py``. -(Note: although the ``package_data`` argument was previously only available in -``setuptools``, it was also added to the Python ``distutils`` package as of -Python 2.4; there is `some documentation for the feature`__ available on the -python.org website. If using the setuptools-specific ``include_package_data`` -argument, files specified by ``package_data`` will *not* be automatically -added to the manifest unless they are listed in the MANIFEST.in file.) +.. note:: + If using the ``include_package_data`` argument, files specified by + ``package_data`` will *not* be automatically added to the manifest unless + they are listed in the |MANIFEST.in|_ file or by a plugin like + :pypi:`setuptools-scm` or :pypi:`setuptools-svn`. -__ https://docs.python.org/3/distutils/setupscript.html#installing-package-data +.. https://docs.python.org/3/distutils/setupscript.html#installing-package-data Sometimes, the ``include_package_data`` or ``package_data`` options alone aren't sufficient to precisely define what files you want included. For @@ -125,11 +125,13 @@ included as a result of using ``include_package_data``. In summary, the three options allow you to: ``include_package_data`` - Accept all data files and directories matched by ``MANIFEST.in``. + Accept all data files and directories matched by |MANIFEST.in|_ or added by + a :ref:`plugin <Adding Support for Revision Control Systems>`. ``package_data`` Specify additional patterns to match files that may or may - not be matched by ``MANIFEST.in`` or found in source control. + not be matched by |MANIFEST.in|_ or added by + a :ref:`plugin <Adding Support for Revision Control Systems>`. ``exclude_package_data`` Specify patterns for data files and directories that should *not* be @@ -154,14 +156,22 @@ Typically, existing programs manipulate a package's ``__file__`` attribute in order to find the location of data files. However, this manipulation isn't compatible with PEP 302-based import hooks, including importing from zip files and Python Eggs. It is strongly recommended that, if you are using data files, -you should use the :ref:`ResourceManager API` of ``pkg_resources`` to access -them. The ``pkg_resources`` module is distributed as part of setuptools, so if -you're using setuptools to distribute your package, there is no reason not to -use its resource management API. See also `Importlib Resources`_ for -a quick example of converting code that uses ``__file__`` to use -``pkg_resources`` instead. +you should use :mod:`importlib.resources` to access them. +:mod:`importlib.resources` was added to Python 3.7 and the latest version of +the library is also available via the :pypi:`importlib-resources` backport. +See :doc:`importlib-resources:using` for detailed instructions [#importlib]_. + +.. tip:: Files inside the package directory should be *read-only* to avoid a + series of common problems (e.g. when multiple users share a common Python + installation, when the package is loaded from a zip file, or when multiple + instances of a Python application run in parallel). -.. 
_Importlib Resources: https://docs.python.org/3/library/importlib.html#module-importlib.resources + If your Python package needs to write to a file for shared data or configuration, + you can use standard platform/OS-specific system directories, such as + ``~/.local/config/$appname`` or ``/usr/share/$appname/$version`` (Linux specific) [#system-dirs]_. + A common approach is to add a read-only template file to the package + directory that is then copied to the correct system directory if no + pre-existing file is found. Non-Package Data Files @@ -174,4 +184,23 @@ fall back to the platform-specific location for installing data files, there is no supported facility to reliably retrieve these resources. Instead, the PyPA recommends that any data files you wish to be accessible at -run time be included in the package. +run time be included **inside the package**. + + +---- + +.. [#datafiles] ``setuptools`` consider a *package data file* any non-Python + file **inside the package directory** (i.e., that co-exists in the same + location as the regular ``.py`` files being distributed). + +.. [#system-dirs] These locations can be discovered with the help of + third-party libraries such as :pypi:`platformdirs`. + +.. [#importlib] Recent versions of :mod:`importlib.resources` available in + Pythons' standard library should be API compatible with + :pypi:`importlib-metadata`. However this might vary depending on which version + of Python is installed. + + +.. |MANIFEST.in| replace:: ``MANIFEST.in`` +.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst index d1c25df1..52379dbf 100644 --- a/docs/userguide/declarative_config.rst +++ b/docs/userguide/declarative_config.rst @@ -1,8 +1,8 @@ .. _declarative config: ------------------------------------------ -Configuring setup() using setup.cfg files ------------------------------------------ +------------------------------------------------ +Configuring setuptools using ``setup.cfg`` files +------------------------------------------------ .. note:: New in 30.3.0 (8 Dec 2016). @@ -24,27 +24,22 @@ boilerplate code in some cases. [metadata] name = my_package - version = attr: src.VERSION + version = attr: my_package.VERSION description = My package description long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst keywords = one, two license = BSD 3-Clause License classifiers = Framework :: Django - License :: OSI Approved :: BSD License Programming Language :: Python :: 3 - Programming Language :: Python :: 3.5 [options] zip_safe = False include_package_data = True packages = find: - scripts = - bin/first.py - bin/second.py install_requires = requests - importlib; python_version == "2.6" + importlib-metadata; python_version<"3.8" [options.package_data] * = *.txt, *.rst @@ -52,7 +47,7 @@ boilerplate code in some cases. [options.entry_points] console_scripts = - executable-name = package.module:function + executable-name = my_package.module:function [options.extras_require] pdf = ReportLab>=1.2; RXP @@ -60,8 +55,10 @@ boilerplate code in some cases. [options.packages.find] exclude = - src.subpackage1 - src.subpackage2 + examples* + tools* + docs* + my_package.tests* Metadata and options are set in the config sections of the same name. @@ -222,10 +219,10 @@ data_files section 40.6.0 [# .. 
[#opt-1] In the ``package_data`` section, a key named with a single asterisk (``*``) refers to all packages, in lieu of the empty string used in ``setup.py``. - + .. [#opt-2] In the ``extras_require`` section, values are parsed as ``list-semi``. This implies that in order to include markers, they **must** be *dangling*: - + .. code-block:: ini [options.extras_require] diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst index 23578a57..d15b45cb 100644 --- a/docs/userguide/dependency_management.rst +++ b/docs/userguide/dependency_management.rst @@ -28,7 +28,7 @@ other two types of dependency keyword, this one is specified in your .. code-block:: ini [build-system] - requires = ["setuptools", "wheel"] + requires = ["setuptools"] #... .. note:: @@ -43,7 +43,7 @@ other two types of dependency keyword, this one is specified in your Declaring required dependency ============================= This is where a package declares its core dependencies, without which it won't -be able to run. ``setuptools`` support automatically download and install +be able to run. ``setuptools`` supports automatically downloading and installing these dependencies when the package is installed. Although there is more finesse to it, let's start with a simple example. @@ -69,6 +69,18 @@ finesse to it, let's start with a simple example. ], ) +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + # ... + dependencies = [ + "docutils", + "BazSpam == 1.1", + ] + # ... + When your project is installed (e.g. using pip), all of the dependencies not already installed will be located (via PyPI), downloaded, built (if necessary), @@ -78,7 +90,7 @@ that verify the availability of the specified dependencies at runtime. Platform specific dependencies ------------------------------ -Setuptools offer the capability to evaluate certain conditions before blindly +Setuptools offers the capability to evaluate certain conditions before blindly installing everything listed in ``install_requires``. This is great for platform specific dependencies. For example, the ``enum`` package was added in Python 3.4, therefore, package that depends on it can elect to install it only when @@ -104,6 +116,17 @@ the Python version is older than 3.4. To accomplish this ], ) +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + # ... + dependencies = [ + "enum34; python_version<'3.4'", + ] + # ... + Similarly, if you also wish to declare ``pywin32`` with a minimal version of 1.0 and only install it if the user is using a Windows operating system: @@ -129,6 +152,18 @@ and only install it if the user is using a Windows operating system: ], ) +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + # ... + dependencies = [ + "enum34; python_version<'3.4'", + "pywin32 >= 1.0; platform_system=='Windows'", + ] + # ... + The environmental markers that may be used for testing platform types are detailed in `PEP 508 <https://www.python.org/dev/peps/pep-0508/>`_. @@ -215,9 +250,9 @@ distributions, if the package's dependencies aren't already installed: Optional dependencies ===================== Setuptools allows you to declare dependencies that only get installed under -specific circumstances. These dependencies are specified with ``extras_require`` +specific circumstances. 
These dependencies are specified with the ``extras_require`` keyword and are only installed if another package depends on it (either -directly or indirectly) This makes it convenient to declare dependencies for +directly or indirectly). This makes it convenient to declare dependencies for ancillary functions such as "tests" and "docs". .. note:: @@ -249,50 +284,18 @@ dependencies for it to work: }, ) -The name ``PDF`` is an arbitrary identifier of such a list of dependencies, to -which other components can refer and have them installed. There are two common -use cases. - -First is the console_scripts entry point: +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ -.. tab:: setup.cfg + .. code-block:: toml - .. code-block:: ini + # ... + [project.optional-dependencies] + PDF = ["ReportLab>=1.2", "RXP"] - [metadata] - name = Project A - #... - - [options] - #... - entry_points= - [console_scripts] - rst2pdf = project_a.tools.pdfgen [PDF] - rst2html = project_a.tools.htmlgen - -.. tab:: setup.py - - .. code-block:: python - - setup( - name="Project-A", - ..., - entry_points={ - "console_scripts": [ - "rst2pdf = project_a.tools.pdfgen [PDF]", - "rst2html = project_a.tools.htmlgen", - ], - }, - ) - -This syntax indicates that the entry point (in this case a console script) -is only valid when the PDF extra is installed. It is up to the installer -to determine how to handle the situation where PDF was not indicated -(e.g. omit the console script, provide a warning when attempting to load -the entry point, assume the extras are present and let the implementation -fail later). +The name ``PDF`` is an arbitrary identifier of such a list of dependencies, to +which other components can refer and have them installed. -The second use case is that other package can use this "extra" for their +A use case for this approach is that other package can use this "extra" for their own dependencies. For example, if "Project-B" needs "project A" with PDF support installed, it might declare the dependency like this: @@ -319,6 +322,17 @@ installed, it might declare the dependency like this: ..., ) +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + name = "Project-B" + # ... + dependencies = [ + "Project-A[PDF]" + ] + This will cause ReportLab to be installed along with project A, if project B is installed -- even if project A was already installed. In this way, a project can encapsulate groups of optional "downstream dependencies" under a feature @@ -329,18 +343,61 @@ ReportLab in order to provide PDF support, Project B's setup information does not need to change, but the right packages will still be installed if needed. .. note:: - Best practice: if a project ends up not needing any other packages to + Best practice: if a project ends up no longer needing any other packages to support a feature, it should keep an empty requirements list for that feature in its ``extras_require`` argument, so that packages depending on that feature don't break (due to an invalid feature name). +Historically ``setuptools`` also used to support extra dependencies in console +scripts, for example: + +.. tab:: setup.cfg + + .. code-block:: ini + + [metadata] + name = Project A + #... + + [options] + #... + entry_points= + [console_scripts] + rst2pdf = project_a.tools.pdfgen [PDF] + rst2html = project_a.tools.htmlgen + +.. tab:: setup.py + + .. 
code-block:: python + + setup( + name="Project-A", + ..., + entry_points={ + "console_scripts": [ + "rst2pdf = project_a.tools.pdfgen [PDF]", + "rst2html = project_a.tools.htmlgen", + ], + }, + ) + +This syntax indicates that the entry point (in this case a console script) +is only valid when the PDF extra is installed. It is up to the installer +to determine how to handle the situation where PDF was not indicated +(e.g. omit the console script, provide a warning when attempting to load +the entry point, assume the extras are present and let the implementation +fail later). + +.. warning:: + ``pip`` and other tools might not support this use case for extra + dependencies, therefore this practice is considered **deprecated**. + See :doc:`PyPUG:specifications/entry-points`. + Python requirement ================== In some cases, you might need to specify the minimum required python version. -This is handled with the ``python_requires`` keyword supplied to ``setup.cfg`` -or ``setup.py``. - +This can be configured as shown in the example below. .. tab:: setup.cfg @@ -360,6 +417,27 @@ or ``setup.py``. setup( name="Project-B", - python_requires=[">=3.6"], + python_requires=">=3.6", ..., ) + + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + name = "Project-B" + requires-python = ">=3.6" + # ... + +---- + +.. rubric:: Notes + +.. [#experimental] + While the ``[build-system]`` table should always be specified in the + ``pyproject.toml`` file, support for adding package metadata and build configuration + options via the ``[project]`` and ``[tool.setuptools]`` tables is still + experimental and might change (or be completely removed) in future releases. + See :doc:`/userguide/pyproject_config`. diff --git a/docs/userguide/distribution.rst b/docs/userguide/distribution.rst index 2872dacd..db0f1a5f 100644 --- a/docs/userguide/distribution.rst +++ b/docs/userguide/distribution.rst @@ -162,7 +162,7 @@ Specifying Your Project's Version --------------------------------- Setuptools can work well with most versioning schemes. Over the years, -setuptools has tried to closely follow the +setuptools has tried to closely follow the `PEP 440 <https://www.python.org/dev/peps/pep-0440/>`_ scheme, but it also supports legacy versions. There are, however, a few special things to watch out for, in order to ensure that setuptools and diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst index 21edc697..b97419c4 100644 --- a/docs/userguide/entry_point.rst +++ b/docs/userguide/entry_point.rst @@ -54,11 +54,32 @@ above example, to create a command ``hello-world`` that invokes ``timmins.hello_world``, add a console script entry point to ``setup.cfg``: -.. code-block:: ini +.. tab:: setup.cfg + + .. code-block:: ini + + [options.entry_points] + console_scripts = + hello-world = timmins:hello_world + +.. tab:: setup.py + + .. code-block:: python + + from setuptools import setup + + setup( + name='timmins', + version='0.0.1', + packages=['timmins'], + # ... + entry_points={ + 'console_scripts': [ + 'hello-world=timmins:hello_world', + ] + } + ) - [options.entry_points] - console_scripts = - hello-world = timmins:hello_world After installing the package, a user may invoke that function by simply calling ``hello-world`` on the command line. @@ -99,7 +120,7 @@ and tools like ``pip`` create wrapper scripts that invoke those commands. 
For a project wishing to solicit entry points, Setuptools recommends the `importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_ module (part of stdlib since Python 3.8) or its backport, -`importlib_metadata <https://pypi.org/project/importlib_metadata>`_. +:pypi:`importlib_metadata`. For example, to find the console script entry points from the example above: diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst index d74ca3fe..21fb05b6 100644 --- a/docs/userguide/extension.rst +++ b/docs/userguide/extension.rst @@ -194,8 +194,8 @@ Adding Support for Revision Control Systems If the files you want to include in the source distribution are tracked using Git, Mercurial or SVN, you can use the following packages to achieve that: -- Git and Mercurial: `setuptools_scm <https://pypi.org/project/setuptools_scm/>`_ -- SVN: `setuptools_svn <https://pypi.org/project/setuptools_svn/>`_ +- Git and Mercurial: :pypi:`setuptools_scm` +- SVN: :pypi:`setuptools_svn` If you would like to create a plugin for ``setuptools`` to find files tracked by another revision control system, you can do so by adding an entry point to diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst index eca5a85a..49655acd 100644 --- a/docs/userguide/index.rst +++ b/docs/userguide/index.rst @@ -31,6 +31,7 @@ quickstart provides an overview of the new workflow. distribution extension declarative_config + pyproject_config keywords commands functionalities_rewrite diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 3df327d7..5fd2f0a8 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -94,3 +94,66 @@ correctly when installed as a zipfile, correct any problems if you can, and then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe`` flag, so that it will not be necessary for ``bdist_egg`` to try to guess whether your project can work as a zipfile. + + +.. _Controlling files in the distribution: + +Controlling files in the distribution +------------------------------------- + +For the most common use cases, ``setuptools`` will automatically find out which +files are necessary for distributing the package. +This includes all :term:`pure Python modules <Pure Module>` in the +``py_modules`` or ``packages`` configuration, and the C sources (but not C +headers) listed as part of extensions when creating a :term:`Source +Distribution (or "sdist")`. + +However, when building more complex packages (e.g. packages that include +non-Python files, or that need to use custom C headers), you might find that +not all files present in your project folder are included in package +:term:`distribution archive <Distribution Package>`. + +In these situations you can use a ``setuptools`` +:ref:`plugin <Adding Support for Revision Control Systems>`, +such as :pypi:`setuptools-scm` or :pypi:`setuptools-svn` to automatically +include all files tracked by your Revision Control System into the ``sdist``. + +.. _Using MANIFEST.in: + +Alternatively, if you need finer control, you can add a ``MANIFEST.in`` file at +the root of your project. +This file contains instructions that tell ``setuptools`` which files exactly +should be part of the ``sdist`` (or not). +A comprehensive guide to ``MANIFEST.in`` syntax is available at the +:doc:`PyPA's Packaging User Guide <PyPUG:guides/using-manifest-in>`. 
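
For illustration only, a minimal ``MANIFEST.in`` might look like the sketch
below (the package name ``mypkg`` and the listed file patterns are
hypothetical and would need to be adapted to your project layout):

.. code-block:: text

   # ship the license and any reStructuredText files from the project root
   include LICENSE *.rst
   # ship non-Python data files that live inside the package
   recursive-include src/mypkg *.json *.csv
   # keep generated documentation out of the sdist
   prune docs/_build

Commands are processed in the order they appear, so a later ``prune`` or
``exclude`` line can remove files matched by an earlier ``include`` line;
the guide linked above documents the complete set of commands.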
+ +Once the correct files are present in the ``sdist``, they can then be used by +binary extensions during the build process, or included in the final +:term:`wheel <Wheel>` [#build-process]_ if you configure ``setuptools`` with +``include_package_data=True``. + +.. important:: + Please note that, when using ``include_package_data=True``, only files **inside + the package directory** are included in the final ``wheel``, by default. + + So for example, if you create a :term:`Python project <Project>` that uses + :pypi:`setuptools-scm` and have a ``tests`` directory outside of the package + folder, the ``tests`` directory will be present in the ``sdist`` but not in the + ``wheel`` [#wheel-vs-sdist]_. + + See :doc:`/userguide/datafiles` for more information. + +---- + +.. [#build-process] + You can think about the build process as two stages: first the ``sdist`` + will be created and then the ``wheel`` will be produced from that ``sdist``. + +.. [#wheel-vs-sdist] + This happens because the ``sdist`` can contain files that are useful during + development or the build process itself, but not in runtime (e.g. tests, + docs, examples, etc...). + The ``wheel``, on the other hand, is a file format that has been optimized + and is ready to be unpacked into a running installation of Python or + :term:`Virtual Environment`. + Therefore it only contains items that are required during runtime. diff --git a/docs/userguide/package_discovery.rst b/docs/userguide/package_discovery.rst index 61da2d66..38119bc6 100644 --- a/docs/userguide/package_discovery.rst +++ b/docs/userguide/package_discovery.rst @@ -16,8 +16,9 @@ Package Discovery and Namespace Package place to start. ``Setuptools`` provide powerful tools to handle package discovery, including -support for namespace package. Normally, you would specify the package to be -included manually in the following manner: +support for namespace package. + +Normally, you would specify the package to be included manually in the following manner: .. tab:: setup.cfg @@ -38,8 +39,205 @@ included manually in the following manner: packages=['mypkg1', 'mypkg2'] ) -This can get tiresome really quickly. To speed things up, we introduce two -functions provided by setuptools: +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + # ... + [tool.setuptools] + packages = ["mypkg1", "mypkg2"] + # ... + + +If your packages are not in the root of the repository you also need to +configure ``package_dir``: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + # ... + package_dir = + = src + # directory containing all the packages (e.g. src/mypkg1, src/mypkg2) + # OR + package_dir = + mypkg1 = lib1 + # mypkg1.mod corresponds to lib1/mod.py + # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py + mypkg2 = lib2 + # mypkg2.mod corresponds to lib2/mod.py + mypkg2.subpkg = lib3 + # mypkg2.subpkg.mod corresponds to lib3/mod.py + +.. tab:: setup.py + + .. code-block:: python + + setup( + # ... + package_dir = {"": "src"} + # directory containing all the packages (e.g. src/mypkg1, src/mypkg2) + ) + + # OR + + setup( + # ... + package_dir = { + "mypkg1": "lib1", # mypkg1.mod corresponds to lib1/mod.py + # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py + "mypkg2": "lib2", # mypkg2.mod corresponds to lib2/mod.py + "mypkg2.subpkg": "lib3" # mypkg2.subpkg.mod corresponds to lib3/mod.py + # ... + ) + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [tool.setuptools] + # ... 
+ package-dir = {"" = "src"} + # directory containing all the packages (e.g. src/mypkg1, src/mypkg2) + + # OR + + [tool.setuptools.package-dir] + mypkg1 = "lib1" + # mypkg1.mod corresponds to lib1/mod.py + # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py + mypkg2 = "lib2" + # mypkg2.mod corresponds to lib2/mod.py + "mypkg2.subpkg" = "lib3" + # mypkg2.subpkg.mod corresponds to lib3/mod.py + # ... + +This can get tiresome really quickly. To speed things up, you can rely on +setuptools automatic discovery, or use the provided tools, as explained in +the following sections. + + +.. _auto-discovery: + +Automatic discovery +=================== + +.. warning:: Automatic discovery is an **experimental** feature and might change + (or be completely removed) in the future. + See :ref:`custom-discovery` for a stable way of configuring ``setuptools``. + +By default ``setuptools`` will consider 2 popular project layouts, each one with +its own set of advantages and disadvantages [#layout1]_ [#layout2]_ as +discussed in the following sections. + +Setuptools will automatically scan your project directory looking for these +layouts and try to guess the correct values for the :ref:`packages <declarative +config>` and :doc:`py_modules </references/keywords>` configuration. + +.. important:: + Automatic discovery will **only** be enabled if you **don't** provide any + configuration for ``packages`` and ``py_modules``. + If at least one of them is explicitly set, automatic discovery will not take place. + + **Note**: specifying ``ext_modules`` might also prevent auto-discover from + taking place, unless your opt into :doc:`pyproject_config` (which will + disable the backward compatible behaviour). + +.. _src-layout: + +src-layout +---------- +The project should contain a ``src`` directory under the project root and +all modules and packages meant for distribution are placed inside this +directory:: + + project_root_directory + ├── pyproject.toml + ├── setup.cfg # or setup.py + ├── ... + └── src/ + └── mypkg/ + ├── __init__.py + ├── ... + └── mymodule.py + +This layout is very handy when you wish to use automatic discovery, +since you don't have to worry about other Python files or folders in your +project root being distributed by mistake. In some circumstances it can be +also less error-prone for testing or when using :pep:`420`-style packages. +On the other hand you cannot rely on the implicit ``PYTHONPATH=.`` to fire +up the Python REPL and play with your package (you will need an +`editable install`_ to be able to do that). + +.. _flat-layout: + +flat-layout +----------- +*(also known as "adhoc")* + +The package folder(s) are placed directly under the project root:: + + project_root_directory + ├── pyproject.toml + ├── setup.cfg # or setup.py + ├── ... + └── mypkg/ + ├── __init__.py + ├── ... + └── mymodule.py + +This layout is very practical for using the REPL, but in some situations +it can be more error-prone (e.g. during tests or if you have a bunch +of folders or Python files hanging around your project root) + +To avoid confusion, file and folder names that are used by popular tools (or +that correspond to well-known conventions, such as distributing documentation +alongside the project code) are automatically filtered out in the case of +*flat-layout*: + +.. autoattribute:: setuptools.discovery.FlatLayoutPackageFinder.DEFAULT_EXCLUDE + +.. autoattribute:: setuptools.discovery.FlatLayoutModuleFinder.DEFAULT_EXCLUDE + +.. 
warning:: + If you are using auto-discovery with *flat-layout*, ``setuptools`` will + refuse to create :term:`distribution archives <Distribution Package>` with + multiple top-level packages or modules. + + This is done to prevent common errors such as accidentally publishing code + not meant for distribution (e.g. maintenance-related scripts). + + Users that purposefully want to create multi-package distributions are + advised to use :ref:`custom-discovery` or the ``src-layout``. + +There is also a handy variation of the *flat-layout* for utilities/libraries +that can be implemented with a single Python file: + +single-module distribution +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A standalone module is placed directly under the project root, instead of +inside a package folder:: + + project_root_directory + ├── pyproject.toml + ├── setup.cfg # or setup.py + ├── ... + └── single_file_lib.py + + +.. _custom-discovery: + +Custom discovery +================ + +If the automatic discovery does not work for you +(e.g., you want to *include* in the distribution top-level packages with +reserved names such as ``tasks``, ``example`` or ``docs``, or you want to +*exclude* nested packages that would be otherwise included), you can use +the provided tools for package discovery: .. tab:: setup.cfg @@ -55,29 +253,41 @@ functions provided by setuptools: .. code-block:: python from setuptools import find_packages - # or from setuptools import find_namespace_packages +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ -Using ``find:`` or ``find_packages`` -==================================== -Let's start with the first tool. ``find:`` (``find_packages``) takes a source -directory and two lists of package name patterns to exclude and include, and -then return a list of ``str`` representing the packages it could find. To use -it, consider the following directory - -.. code-block:: bash + .. code-block:: toml - mypkg/ - src/ - pkg1/__init__.py - pkg2/__init__.py - additional/__init__.py + # ... + [tool.setuptools.packages] + find = {} # Scanning implicit namespaces is active by default + # OR + find = {namespace = false} # Disable implicit namespaces - setup.cfg #or setup.py -To have your setup.cfg or setup.py to automatically include packages found +Finding simple packages +----------------------- +Let's start with the first tool. ``find:`` (``find_packages()``) takes a source +directory and two lists of package name patterns to exclude and include, and +then return a list of ``str`` representing the packages it could find. To use +it, consider the following directory:: + + mypkg + ├── setup.cfg # and/or setup.py, pyproject.toml + └── src + ├── pkg1 + │ └── __init__.py + ├── pkg2 + │ └── __init__.py + ├── aditional + │ └── __init__.py + └── pkg + └── namespace + └── __init__.py + +To have setuptools to automatically include packages found in ``src`` that starts with the name ``pkg`` and not ``additional``: .. tab:: setup.cfg @@ -94,6 +304,10 @@ in ``src`` that starts with the name ``pkg`` and not ``additional``: include = pkg* exclude = additional + .. note:: + ``pkg`` does not contain an ``__init__.py`` file, therefore + ``pkg.namespace`` is ignored by ``find:`` (see ``find_namespace:`` below). + .. tab:: setup.py .. code-block:: python @@ -110,16 +324,53 @@ in ``src`` that starts with the name ``pkg`` and not ``additional``: ) + .. note:: + ``pkg`` does not contain an ``__init__.py`` file, therefore + ``pkg.namespace`` is ignored by ``find_packages()`` + (see ``find_namespace_packages()`` below). + +.. 
tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [tool.setuptools.packages.find] + where = ["src"] + include = ["pkg*"] + exclude = ["additional"] + namespaces = false + + .. note:: + When using ``tool.setuptools.packages.find`` in ``pyproject.toml``, + setuptools will consider :pep:`implicit namespaces <420>` by default when + scanning your project directory. + To avoid ``pkg.namespace`` from being added to your package list + you can set ``namespaces = false``. This will prevent any folder + without an ``__init__.py`` file from being scanned. + +.. important:: + ``include`` and ``exclude`` accept strings representing :mod:`glob` patterns. + These patterns should match the **full** name of the Python module (as if it + was written in an ``import`` statement). + + For example if you have ``util`` pattern, it will match + ``util/__init__.py`` but not ``util/files/__init__.py``. + + The fact that the parent package is matched by the pattern will not dictate + if the submodule will be included or excluded from the distribution. + You will need to explicitly add a wildcard (e.g. ``util*``) + if you want the pattern to also match submodules. + .. _Namespace Packages: -Using ``find_namespace:`` or ``find_namespace_packages`` -======================================================== -``setuptools`` provides the ``find_namespace:`` (``find_namespace_packages``) -which behaves similarly to ``find:`` but works with namespace package. Before -diving in, it is important to have a good understanding of what namespace -packages are. Here is a quick recap: +Finding namespace packages +-------------------------- +``setuptools`` provides the ``find_namespace:`` (``find_namespace_packages()``) +which behaves similarly to ``find:`` but works with namespace package. + +Before diving in, it is important to have a good understanding of what +:pep:`namespace packages <420>` are. Here is a quick recap. -Suppose you have two packages named as follows: +When you have two packages organized as follows: .. code-block:: bash @@ -128,7 +379,7 @@ Suppose you have two packages named as follows: If both ``Desktop`` and ``Library`` are on your ``PYTHONPATH``, then a namespace package called ``timmins`` will be created automatically for you when -you invoke the import mechanism, allowing you to accomplish the following +you invoke the import mechanism, allowing you to accomplish the following: .. code-block:: pycon @@ -137,49 +388,108 @@ you invoke the import mechanism, allowing you to accomplish the following as if there is only one ``timmins`` on your system. The two packages can then be distributed separately and installed individually without affecting the -other one. Suppose you are packaging the ``foo`` part: +other one. -.. code-block:: bash +Now, suppose you decide to package the ``foo`` part for distribution and start +by creating a project directory organized as follows:: + + foo + ├── setup.cfg # and/or setup.py, pyproject.toml + └── src + └── timmins + └── foo + └── __init__.py - foo/ - src/ - timmins/foo/__init__.py - setup.cfg # or setup.py +If you want the ``timmins.foo`` to be automatically included in the +distribution, then you will need to specify: -and you want the ``foo`` to be automatically included, ``find:`` won't work -because timmins doesn't contain ``__init__.py`` directly, instead, you have -to use ``find_namespace:``: +.. tab:: setup.cfg -.. code-block:: ini + .. 
code-block:: ini - [options] - package_dir = - =src - packages = find_namespace: + [options] + package_dir = + =src + packages = find_namespace: - [options.packages.find] - where = src + [options.packages.find] + where = src -When you install the zipped distribution, ``timmins.foo`` would become + ``find:`` won't work because timmins doesn't contain ``__init__.py`` + directly, instead, you have to use ``find_namespace:``. + + You can think of ``find_namespace:`` as identical to ``find:`` except it + would count a directory as a package even if it doesn't contain ``__init__.py`` + file directly. + +.. tab:: setup.py + + .. code-block:: python + + setup( + # ... + packages=find_namespace_packages(where='src'), + package_dir={"": "src"} + # ... + ) + + When you use ``find_packages()``, all directories without an + ``__init__.py`` file will be disconsidered. + On the other hand, ``find_namespace_packages()`` will scan all + directories. + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [tool.setuptools.packages.find] + where = ["src"] + + When using ``tool.setuptools.packages.find`` in ``pyproject.toml``, + setuptools will consider :pep:`implicit namespaces <420>` by default when + scanning your project directory. + +After installing the package distribution, ``timmins.foo`` would become available to your interpreter. -You can think of ``find_namespace:`` as identical to ``find:`` except it -would count a directory as a package even if it doesn't contain ``__init__.py`` -file directly. As a result, this creates an interesting side effect. If you -organize your package like this: +.. warning:: + Please have in mind that ``find_namespace:`` (setup.cfg), + ``find_namespace_packages()`` (setup.py) and ``find`` (pyproject.toml) will + scan **all** folders that you have in your project directory if you use a + :ref:`flat-layout`. -.. code-block:: bash + If used naïvely, this might result in unwanted files being added to your + final wheel. For example, with a project directory organized as follows:: + + foo + ├── docs + │ └── conf.py + ├── timmins + │ └── foo + │ └── __init__.py + └── tests + └── tests_foo + └── __init__.py + + final users will end up installing not only ``timmins.foo``, but also + ``docs`` and ``tests.tests_foo``. + + A simple way to fix this is to adopt the aforementioned :ref:`src-layout`, + or make sure to properly configure the ``include`` and/or ``exclude`` + accordingly. - foo/ - timmins/ - foo/__init__.py - setup.cfg # or setup.py - tests/ - test_foo/__init__.py +.. tip:: + After :ref:`building your package <building>`, you can have a look if all + the files are correct (nothing missing or extra), by running the following + commands: -a naive ``find_namespace:`` would include tests as part of your package to -be installed. A simple way to fix it is to adopt the aforementioned -``src`` layout. + .. code-block:: bash + + tar tf dist/*.tar.gz + unzip -l dist/*.whl + + This requires the ``tar`` and ``unzip`` to be installed in your OS. + On Windows you can also use a GUI program such as 7zip_. Legacy Namespace Packages @@ -228,12 +538,13 @@ And your directory should look like this .. code-block:: bash - /foo/ - src/ - timmins/ - __init__.py - foo/__init__.py - setup.cfg #or setup.py + foo + ├── setup.cfg # and/or setup.py, pyproject.toml + └── src + └── timmins + ├── __init__.py + └── foo + └── __init__.py Repeat the same for other packages and you can achieve the same result as the previous section. 
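
For reference, in the *pkg_resources style* shown above, the namespace-level
``__init__.py`` (e.g. ``src/timmins/__init__.py`` in the example tree) is
expected to contain only the namespace declaration, along the lines of this
hypothetical snippet:

.. code-block:: python

   # src/timmins/__init__.py -- pkg_resources-style namespace package
   __import__('pkg_resources').declare_namespace(__name__)

Regular sub-packages such as ``timmins.foo`` keep their normal
``__init__.py`` files and are declared or discovered as usual.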
@@ -249,3 +560,17 @@ file contains the following: __path__ = __import__('pkgutil').extend_path(__path__, __name__) The project layout remains the same and ``setup.cfg`` remains the same. + + +---- + + +.. [#experimental] + Support for specifying package metadata and build configuration options via + ``pyproject.toml`` is experimental and might change (or be completely + removed) in the future. See :doc:`/userguide/pyproject_config`. +.. [#layout1] https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure +.. [#layout2] https://blog.ionelmc.ro/2017/09/25/rehashing-the-src-layout/ + +.. _editable install: https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs +.. _7zip: https://www.7-zip.org diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst new file mode 100644 index 00000000..47c4511e --- /dev/null +++ b/docs/userguide/pyproject_config.rst @@ -0,0 +1,218 @@ +.. _pyproject.toml config: + +----------------------------------------------------- +Configuring setuptools using ``pyproject.toml`` files +----------------------------------------------------- + +.. note:: New in 61.0.0 (**experimental**) + +.. warning:: + Support for declaring :doc:`project metadata + <PyPUG:specifications/declaring-project-metadata>` or configuring + ``setuptools`` via ``pyproject.toml`` files is still experimental and might + change (or be removed) in future releases. + +.. important:: + For the time being, ``pip`` still might require a ``setup.py`` file + to support :doc:`editable installs <pip:cli/pip_install>`. + + A simple script will suffice, for example: + + .. code-block:: python + + from setuptools import setup + + setup() + +Starting with :pep:`621`, the Python community selected ``pyproject.toml`` as +a standard way of specifying *project metadata*. +``Setuptools`` has adopted this standard and will use the information contained +in this file as an input in the build process. + +The example below illustrates how to write a ``pyproject.toml`` file that can +be used with ``setuptools``. It contains two TOML tables (identified by the +``[table-header]`` syntax): ``build-system`` and ``project``. +The ``build-system`` table is used to tell the build frontend (e.g. +:pypi:`build` or :pypi:`pip`) to use ``setuptools`` and any other plugins (e.g. +``setuptools-scm``) to build the package. +The ``project`` table contains metadata fields as described by +:doc:`PyPUG:specifications/declaring-project-metadata` guide. + +.. _example-pyproject-config: + +.. code-block:: toml + + [build-system] + requires = ["setuptools", "setuptools-scm"] + build-backend = "setuptools.build_meta" + + [project] + name = "my_package" + description = "My package description" + readme = "README.rst" + keywords = ["one", "two"] + license = {text = "BSD 3-Clause License"} + classifiers = [ + "Framework :: Django", + "Programming Language :: Python :: 3", + ] + dependencies = [ + "requests", + 'importlib-metadata; python_version<"3.8"', + ] + dynamic = ["version"] + + [project.optional-dependencies] + pdf = ["ReportLab>=1.2", "RXP"] + rest = ["docutils>=0.3", "pack ==1.1, ==1.3"] + + [project.scripts] + my-script = "my_package.module:function" + + +.. _setuptools-table: + +Setuptools-specific configuration +================================= + +While the standard ``project`` table in the ``pyproject.toml`` file covers most +of the metadata used during the packaging process, there are still some +``setuptools``-specific configurations that can be set by users that require +customization. 
+These configurations are completely optional and probably can be skipped when +creating simple packages. +They are equivalent to the :doc:`/references/keywords` used by the ``setup.py`` +file, and can be set via the ``tool.setuptools`` table: + +========================= =========================== ========================= +Key Value Type (TOML) Notes +========================= =========================== ========================= +``platforms`` array +``zip-safe`` boolean If not specified, ``setuptools`` will try to guess + a reasonable default for the package +``eager-resources`` array +``py-modules`` array See tip below +``packages`` array or ``find`` directive See tip below +``package-dir`` table/inline-table Used when explicitly listing ``packages`` +``namespace-packages`` array Not necessary if you use :pep:`420` +``package-data`` table/inline-table See :doc:`/userguide/datafiles` +``include-package-data`` boolean ``True`` by default +``exclude-package-data`` table/inline-table +``license-files`` array of glob patterns **Provisional** - likely to change with :pep:`639` + (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``) +``data-files`` table/inline-table **Deprecated** - check :doc:`/userguide/datafiles` +``script-files`` array **Deprecated** - equivalent to the ``script`` keyword in ``setup.py`` + (should be avoided in favour of ``project.scripts``) +``provides`` array **Ignored by pip** +``obsoletes`` array **Ignored by pip** +========================= =========================== ========================= + +.. note:: + The `TOML value types`_ ``array`` and ``table/inline-table`` are roughly + equivalent to the Python's :obj:`dict` and :obj:`list` data types. + +Please note that some of these configurations are deprecated or at least +discouraged, but they are made available to ensure portability. +New packages should avoid relying on deprecated/discouraged fields, and +existing packages should consider alternatives. + +.. tip:: + When both ``py-modules`` and ``packages`` are left unspecified, + ``setuptools`` will attempt to perform :ref:`auto-discovery`, which should + cover most popular project directory organization techniques, such as the + :ref:`src-layout` and the :ref:`flat-layout`. + + However if your project does not follow these conventional layouts + (e.g. you want to use a ``flat-layout`` but at the same time have custom + directories at the root of your project), you might need to use the ``find`` + directive [#directives]_ as shown below: + + .. code-block:: toml + + [tool.setuptools.packages.find] + where = ["src"] # list of folders that contain the packages (["."] by default) + include = ["my_package*"] # package names should match these glob patterns (["*"] by default) + exclude = ["my_package.tests*"] # exclude packages matching these glob patterns (empty by default) + namespaces = false # to disable scanning PEP 420 namespaces (true by default) + + Note that the glob patterns in the example above need to be matched + by the **entire** package name. This means that if you specify ``exclude = ["tests"]``, + modules like ``tests.my_package.test1`` will still be included in the distribution + (to remove them, add a wildcard to the end of the pattern: ``"tests*"``). + + Alternatively, you can explicitly list the packages in modules: + + .. code-block:: toml + + [tool.setuptools] + packages = ["my_package"] + + +.. 
_dynamic-pyproject-config: + +Dynamic Metadata +================ + +Note that in the first example of this page we use ``dynamic`` to identify +which metadata fields are dynamically computed during the build by either +``setuptools`` itself or the plugins installed via ``build-system.requires`` +(e.g. ``setuptools-scm`` is capable of deriving the current project version +directly from the ``git`` :wiki:`version control` system). + +Currently the following fields can be listed as dynamic: ``version``, +``classifiers``, ``description``, ``entry-points``, ``scripts``, +``gui-scripts`` and ``readme``. +When these fields are expected to be provided by ``setuptools`` a +corresponding entry is required in the ``tool.setuptools.dynamic`` table +[#entry-points]_. For example: + +.. code-block:: toml + + # ... + [project] + name = "my_package" + dynamic = ["version", "readme"] + # ... + [tool.setuptools.dynamic] + version = {attr = "my_package.VERSION"} + readme = {file = ["README.rst", "USAGE.rst"]} + +In the ``dynamic`` table, the ``attr`` directive [#directives]_ will read an +attribute from the given module [#attr]_, while ``file`` will read the contents +of all given files and concatenate them in a single string. + +================= =================== ========================= +Key Directive Notes +================= =================== ========================= +``version`` ``attr``, ``file`` +``readme`` ``file`` +``description`` ``file`` One-line text +``classifiers`` ``file`` Multi-line text with one classifier per line +``entry-points`` ``file`` INI format following :doc:`PyPUG:specifications/entry-points` + (``console_scripts`` and ``gui_scripts`` can be included) +================= =================== ========================= + +---- + +.. rubric:: Notes + +.. [#entry-points] Dynamic ``scripts`` and ``gui-scripts`` are a special case. + When resolving these metadata keys, ``setuptools`` will look for + ``tool.setuptool.dynamic.entry-points``, and use the values of the + ``console_scripts`` and ``gui_scripts`` :doc:`entry-point groups + <PyPUG:specifications/entry-points>`. + +.. [#directives] In the context of this document, *directives* are special TOML + values that are interpreted differently by ``setuptools`` (usually triggering an + associated function). Most of the times they correspond to a special TOML table + (or inline-table) with a single top-level key. + For example, you can have the ``{find = {where = ["src"], exclude=["tests*"]}}`` + directive for ``tool.setuptools.packages``, or ``{attr = "mymodule.attr"}`` + directive for ``tool.setuptools.dynamic.version``. + +.. [#attr] ``attr`` is meant to be used when the module attribute is statically + specified (e.g. as a string, list or tuple). As a rule of thumb, the + attribute should be able to be parsed with :func:`ast.literal_eval`, and + should not be modified or re-assigned. + +.. _TOML value types: https://toml.io/en/v1.0.0 diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 6bf353a0..2f778521 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -14,9 +14,9 @@ Python packaging at a glance ============================ The landscape of Python packaging is shifting and ``Setuptools`` has evolved to only provide backend support, no longer being the de-facto packaging tool in -the market. All python package must provide a ``pyproject.toml`` and specify +the market. 
Every python package must provide a ``pyproject.toml`` and specify the backend (build system) it wants to use. The distribution can then -be generated with whatever tools that provides a ``build sdist``-alike +be generated with whatever tool that provides a ``build sdist``-like functionality. While this may appear cumbersome, given the added pieces, it in fact tremendously enhances the portability of your package. The change is driven under :pep:`PEP 517 <517#build-requirements>`. To learn more about Python packaging in general, @@ -32,12 +32,17 @@ package your project: .. code-block:: toml [build-system] - requires = ["setuptools", "wheel"] + requires = ["setuptools"] build-backend = "setuptools.build_meta" -Then, you will need a ``setup.cfg`` or ``setup.py`` to specify your package -information, such as metadata, contents, dependencies, etc. Here we demonstrate -the minimum +Then, you will need to specify your package information such as metadata, +contents, dependencies, etc. + +Setuptools currently supports configurations from either ``setup.cfg``, +``setup.py`` or ``pyproject.toml`` [#experimental]_ files, however, configuring new +projects via ``setup.py`` is discouraged [#setup.py]_. + +The following example demonstrates a minimum configuration: .. tab:: setup.cfg @@ -51,9 +56,11 @@ the minimum packages = mypackage install_requires = requests - importlib; python_version == "2.6" + importlib-metadata; python_version < "3.8" + + See :doc:`/userguide/declarative_config` for more information. -.. tab:: setup.py +.. tab:: setup.py [#setup.py]_ .. code-block:: python @@ -65,10 +72,26 @@ the minimum packages=['mypackage'], install_requires=[ 'requests', - 'importlib; python_version == "2.6"', + 'importlib-metadata; python_version == "3.8"', ], ) + See :doc:`/references/keywords` for more information. + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + name = "mypackage" + version = "0.0.1" + dependencies = [ + "requests", + 'importlib-metadata; python_version<"3.8"', + ] + + See :doc:`/userguide/pyproject_config` for more information. + This is what your project would look like:: ~/mypackage/ @@ -76,7 +99,7 @@ This is what your project would look like:: setup.cfg # or setup.py mypackage/__init__.py -Then, you need an builder, such as :std:doc:`PyPA build <pypa-build:index>` +Then, you need a builder, such as :std:doc:`PyPA build <pypa-build:index>` which you can obtain via ``pip install build``. After downloading it, invoke the builder:: @@ -89,80 +112,174 @@ Of course, before you release your project to PyPI, you'll want to add a bit more information to your setup script to help people find or learn about your project. And maybe your project will have grown by then to include a few dependencies, and perhaps some data files and scripts. In the next few sections, -we will walk through those additional but essential information you need +we will walk through the additional but essential information you need to specify to properly package your project. Automatic package discovery =========================== For simple projects, it's usually easy enough to manually add packages to -the ``packages`` keyword in ``setup.cfg``. However, for very large projects -, it can be a big burden to keep the package list updated. ``setuptools`` -therefore provides two convenient tools to ease the burden: :literal:`find:\ ` and -:literal:`find_namespace:\ `. To use it in your project: +the ``packages`` keyword in ``setup.cfg``. 
However, for very large projects, +it can be a big burden to keep the package list updated. +Therefore, ``setuptools`` provides a convenient way to automatically list all +the packages in your project directory: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + packages = find: # OR `find_namespaces:` if you want to use namespaces + + [options.packages.find] (always `find` even if `find_namespaces:` was used before) + # This section is optional + # Each entry in this section is optional, and if not specified, the default values are: + # `where=.`, `include=*` and `exclude=` (empty). + include=mypackage* + exclude=mypackage.tests* + +.. tab:: setup.py [#setup.py]_ + + .. code-block:: python + + from setuptools import find_packages # or find_namespace_packages + + setup( + # ... + packages=find_packages( + where='.', + include=['mypackage*'], # ["*"] by default + exclude=['mypackage.tests'], # empty by default + ), + # ... + ) -.. code-block:: ini +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ - [options] - packages = find: + .. code-block:: toml - [options.packages.find] #optional - include=pkg1, pkg2 - exclude=pk3, pk4 + # ... + [tool.setuptools.packages] + find = {} # Scan the project directory with the default parameters -When you pass the above information, alongside other necessary ones, + # OR + [tool.setuptools.packages.find] + where = ["src"] # ["."] by default + include = ["mypackage*"] # ["*"] by default + exclude = ["mypackage.tests*"] # empty by default + namespaces = false # true by default + +When you pass the above information, alongside other necessary information, ``setuptools`` walks through the directory specified in ``where`` (omitted -here as the package reside in current directory) and filters the packages -it can find following the ``include`` (default to none), then remove -those that match the ``exclude`` and return a list of Python packages. Note -that each entry in the ``[options.packages.find]`` is optional. The above +here as the package resides in the current directory) and filters the packages +it can find following the ``include`` (defaults to none), then removes +those that match the ``exclude`` and returns a list of Python packages. The above setup also allows you to adopt a ``src/`` layout. For more details and advanced -use, go to :ref:`package_discovery` +use, go to :ref:`package_discovery`. + +.. tip:: + Starting with version 61.0.0, setuptools' automatic discovery capabilities + have been improved to detect popular project layouts (such as the + :ref:`flat-layout` and :ref:`src-layout`) without requiring any + special configuration. Check out our :ref:`reference docs <package_discovery>` + for more information, but please keep in mind that this functionality is + still considered **experimental** and might change (or even be removed) in + future releases. Entry points and automatic script creation =========================================== -Setuptools support automatic creation of scripts upon installation, that runs -code within your package if you specify them with the ``entry_points`` keyword. +Setuptools supports automatic creation of scripts upon installation, that runs +code within your package if you specify them as :doc:`entry points +<PyPUG:specifications/entry-points>`. This is what allows you to run commands like ``pip install`` instead of having -to type ``python -m pip install``. To accomplish this, add the entry_points -keyword in your ``setup.cfg``: +to type ``python -m pip install``. 
+The following configuration examples show how to accomplish this: -.. code-block:: ini +.. tab:: setup.cfg + + .. code-block:: ini - [options.entry_points] - console_scripts = - main = mypkg:some_func + [options.entry_points] + console_scripts = + cli-name = mypkg.mymodule:some_func -When this project is installed, a ``main`` script will be installed and will -invoke the ``some_func`` in the ``__init__.py`` file when called by the user. -For detailed usage, including managing the additional or optional dependencies, -go to :doc:`entry_point`. +.. tab:: setup.py [#setup.py]_ + + .. code-block:: python + + setup( + # ... + entry_points={ + 'console_scripts': [ + 'cli-name = mypkg.mymodule:some_func', + ] + } + ) + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project.scripts] + cli-name = "mypkg.mymodule:some_func" + +When this project is installed, a ``cli-name`` executable will be created. +``cli-name`` will invoke the function ``some_func`` in the +``mypkg/mymodule.py`` file when called by the user. +Note that you can also use the ``entry-points`` mechanism to advertise +components between installed packages and implement plugin systems. +For detailed usage, go to :doc:`entry_point`. Dependency management ===================== -``setuptools`` supports automatically installing dependencies when a package is -installed. The simplest way to include requirement specifiers is to use the -``install_requires`` argument to ``setup.cfg``. It takes a string or list of -strings containing requirement specifiers (A version specifier is one of the -operators <, >, <=, >=, == or !=, followed by a version identifier): +Packages built with ``setuptools`` can specify dependencies to be automatically +installed when the package itself is installed. +The example below show how to configure this kind of dependencies: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + install_requires = + docutils + requests <= 0.4 + +.. tab:: setup.py [#setup.py]_ + + .. code-block:: python -.. code-block:: ini + setup( + # ... + install_requires=["docutils", "requests <= 0.4"], + # ... + ) - [options] - install_requires = - docutils >= 0.3 - requests <= 0.4 +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml + + [project] + # ... + dependencies = [ + "docutils", + "requires <= 0.4", + ] + # ... + +Each dependency is represented by a string that can optionally contain version requirements +(e.g. one of the operators <, >, <=, >=, == or !=, followed by a version identifier), +and/or conditional environment markers, e.g. ``sys_platform == "win32"`` +(see :doc:`PyPUG:specifications/version-specifiers` for more information). When your project is installed, all of the dependencies not already installed will be located (via PyPI), downloaded, built (if necessary), and installed. -This, of course, is a simplified scenarios. ``setuptools`` also provide -additional keywords such as ``setup_requires`` that allows you to install -dependencies before running the script, and ``extras_requires`` that take -care of those needed by automatically generated scripts. It also provides -mechanisms to handle dependencies that are not in PyPI. For more advanced use, -see :doc:`dependency_management` +This, of course, is a simplified scenario. You can also specify groups of +extra dependencies that are not strictly required by your package to work, but +that will provide additional functionalities. +For more advanced use, see :doc:`dependency_management`. .. 
_Including Data Files: @@ -174,25 +291,43 @@ are placed in a platform-specific location. Setuptools offers three ways to specify data files to be included in your packages. For the simplest use, you can simply use the ``include_package_data`` keyword: -.. code-block:: ini +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + include_package_data = True + +.. tab:: setup.py [#setup.py]_ + + .. code-block:: python + + setup( + # ... + include_package_data=True, + # ... + ) + +.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_ + + .. code-block:: toml - [options] - include_package_data = True + [tool.setuptools] + include-package-data = true + # This is already the default behaviour if your are using + # pyproject.toml to configure your build. + # You can deactivate that with `include-package-data = false` This tells setuptools to install any data files it finds in your packages. -The data files must be specified via the distutils' ``MANIFEST.in`` file. -For more details, see :doc:`datafiles` +The data files must be specified via the distutils' |MANIFEST.in|_ file +or automatically added by a :ref:`Revision Control System plugin +<Adding Support for Revision Control Systems>`. +For more details, see :doc:`datafiles`. Development mode ================ -.. tip:: - - Prior to :ref:`pip v21.1 <pip:v21-1>`, a ``setup.py`` script was - required to be compatible with development mode. With late - versions of pip, any project may be installed in this mode. - ``setuptools`` allows you to install a package without copying any files to your interpreter directory (e.g. the ``site-packages`` directory). This allows you to modify your source code and have the changes take @@ -204,13 +339,35 @@ Here's how to do it:: This creates a link file in your interpreter site package directory which associate with your source code. For more information, see :doc:`development_mode`. +.. tip:: + + Prior to :ref:`pip v21.1 <pip:v21-1>`, a ``setup.py`` script was + required to be compatible with development mode. With late + versions of pip, ``setup.cfg``-only projects may be installed in this mode. + + If you are experimenting with :doc:`configuration using <pyproject_config>`, + or have version of ``pip`` older than v21.1, you might need to keep a + ``setup.py`` file in file in your repository if you want to use editable + installs (for the time being). + + A simple script will suffice, for example: + + .. code-block:: python + + from setuptools import setup + + setup() + + You can still keep all the configuration in :doc:`setup.cfg </userguide/declarative_config>` + (or :doc:`pyproject.toml </userguide/pyproject_config>`). + Uploading your package to PyPI ============================== -After generating the distribution files, next step would be to upload your +After generating the distribution files, the next step would be to upload your distribution so others can use it. This functionality is provided by -`twine <https://pypi.org/project/twine/>`_ and we will only demonstrate the -basic use here. +:pypi:`twine` and is documented in the :doc:`Python packaging tutorial +<PyPUG:tutorials/packaging-projects>`. Transitioning from ``setup.py`` to ``setup.cfg`` @@ -218,12 +375,40 @@ Transitioning from ``setup.py`` to ``setup.cfg`` To avoid executing arbitrary scripts and boilerplate code, we are transitioning into a full-fledged ``setup.cfg`` to declare your package information instead of running ``setup()``. This inevitably brings challenges due to a different -syntax. 
Here we provide a quick guide to understanding how ``setup.cfg`` is -parsed by ``setuptool`` to ease the pain of transition. +syntax. :doc:`Here </userguide/declarative_config>` we provide a quick guide to +understanding how ``setup.cfg`` is parsed by ``setuptools`` to ease the pain of +transition. .. _packaging-resources: Resources on Python packaging ============================= -Packaging in Python is hard. Here we provide a list of links for those that -want to learn more. +Packaging in Python can be hard and is constantly evolving. +`Python Packaging User Guide <https://packaging.python.org>`_ has tutorials and +up-to-date references that can help you when it is time to distribute your work. + + +.. |MANIFEST.in| replace:: ``MANIFEST.in`` +.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ + + +---- + +.. rubric:: Notes + +.. [#setup.py] + The ``setup.py`` file should be used only when custom scripting during the + build is necessary. + Examples are kept in this document to help people interested in maintaining or + contributing to existing packages that use ``setup.py``. + Note that you can still keep most of configuration declarative in + :doc:`setup.cfg <declarative_config>` or :doc:`pyproject.toml + <pyproject_config>` and use ``setup.py`` only for the parts not + supported in those files (e.g. C extensions). + +.. [#experimental] + While the ``[build-system]`` table should always be specified in the + ``pyproject.toml`` file, support for adding package metadata and build configuration + options via the ``[project]`` and ``[tool.setuptools]`` tables is still + experimental and might change (or be completely removed) in future releases. + See :doc:`/userguide/pyproject_config`. diff --git a/exercises.py b/exercises.py new file mode 100644 index 00000000..76176be5 --- /dev/null +++ b/exercises.py @@ -0,0 +1,6 @@ +def measure_startup_perf(): + # run by pytest_perf + import subprocess + import sys # end warmup + + subprocess.check_call([sys.executable, '-c', 'pass']) diff --git a/pavement.py b/pavement.py deleted file mode 100644 index 81ff6f12..00000000 --- a/pavement.py +++ /dev/null @@ -1,70 +0,0 @@ -import re -import sys -import subprocess - -from paver.easy import task, path as Path - - -def remove_all(paths): - for path in paths: - path.rmtree() if path.isdir() else path.remove() - - -@task -def update_vendored(): - update_pkg_resources() - update_setuptools() - - -def rewrite_packaging(pkg_files, new_root): - """ - Rewrite imports in packaging to redirect to vendored copies. - """ - for file in pkg_files.glob('*.py'): - text = file.text() - text = re.sub(r' (pyparsing)', rf' {new_root}.\1', text) - text = text.replace( - 'from six.moves.urllib import parse', - 'from urllib import parse', - ) - file.write_text(text) - - -def clean(vendor): - """ - Remove all files out of the vendor directory except the meta - data (as pip uninstall doesn't support -t). 
- """ - remove_all( - path - for path in vendor.glob('*') - if path.basename() != 'vendored.txt' - ) - - -def install(vendor): - clean(vendor) - install_args = [ - sys.executable, - '-m', 'pip', - 'install', - '-r', str(vendor / 'vendored.txt'), - '-t', str(vendor), - ] - subprocess.check_call(install_args) - remove_all(vendor.glob('*.dist-info')) - remove_all(vendor.glob('*.egg-info')) - remove_all(vendor.glob('six.py')) - (vendor / '__init__.py').write_text('') - - -def update_pkg_resources(): - vendor = Path('pkg_resources/_vendor') - install(vendor) - rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern') - - -def update_setuptools(): - vendor = Path('setuptools/_vendor') - install(vendor) - rewrite_packaging(vendor / 'packaging', 'setuptools.extern') diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 42129d5b..d59226af 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -71,12 +71,19 @@ try: except ImportError: importlib_machinery = None +from pkg_resources.extern.jaraco.text import ( + yield_lines, + drop_comment, + join_continuation, +) + from pkg_resources.extern import appdirs from pkg_resources.extern import packaging __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') __import__('pkg_resources.extern.packaging.requirements') __import__('pkg_resources.extern.packaging.markers') +__import__('pkg_resources.extern.packaging.utils') if sys.version_info < (3, 5): raise RuntimeError("Python 3.5 or later is required") @@ -548,6 +555,7 @@ class WorkingSet: self.entries = [] self.entry_keys = {} self.by_key = {} + self.normalized_to_canonical_keys = {} self.callbacks = [] if entries is None: @@ -628,6 +636,14 @@ class WorkingSet: is returned. 
""" dist = self.by_key.get(req.key) + + if dist is None: + canonical_key = self.normalized_to_canonical_keys.get(req.key) + + if canonical_key is not None: + req.key = canonical_key + dist = self.by_key.get(canonical_key) + if dist is not None and dist not in req: # XXX add more info raise VersionConflict(dist, req) @@ -696,6 +712,8 @@ class WorkingSet: return self.by_key[dist.key] = dist + normalized_name = packaging.utils.canonicalize_name(dist.key) + self.normalized_to_canonical_keys[normalized_name] = dist.key if dist.key not in keys: keys.append(dist.key) if dist.key not in keys2: @@ -916,14 +934,15 @@ class WorkingSet: def __getstate__(self): return ( self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] + self.normalized_to_canonical_keys.copy(), self.callbacks[:] ) - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c + def __setstate__(self, e_k_b_n_c): + entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c self.entries = entries[:] self.entry_keys = keys.copy() self.by_key = by_key.copy() + self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy() self.callbacks = callbacks[:] @@ -1581,7 +1600,7 @@ class EggProvider(NullProvider): """Provider based on a virtual filesystem""" def __init__(self, module): - NullProvider.__init__(self, module) + super().__init__(module) self._setup_prefix() def _setup_prefix(self): @@ -1701,7 +1720,7 @@ class ZipProvider(EggProvider): _zip_manifests = MemoizedZipManifests() def __init__(self, module): - EggProvider.__init__(self, module) + super().__init__(module) self.zip_pre = self.loader.archive + os.sep def _zipinfo_name(self, fspath): @@ -2205,12 +2224,14 @@ def _handle_ns(packageName, path_item): # use find_spec (PEP 451) and fall-back to find_module (PEP 302) try: - loader = importer.find_spec(packageName).loader + spec = importer.find_spec(packageName) except AttributeError: # capture warnings due to #1111 with warnings.catch_warnings(): warnings.simplefilter("ignore") loader = importer.find_module(packageName) + else: + loader = spec.loader if spec else None if loader is None: return None @@ -2396,20 +2417,6 @@ def _set_parent_ns(packageName): setattr(sys.modules[parent], name, sys.modules[packageName]) -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, str): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - - MODULE = re.compile(r"\w+(\.\w+)*$").match EGG_NAME = re.compile( r""" @@ -3046,12 +3053,12 @@ class DistInfoDistribution(Distribution): if not req.marker or req.marker.evaluate({'extra': extra}): yield req - common = frozenset(reqs_for_extra(None)) + common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None))) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: s_extra = safe_extra(extra.strip()) - dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common] return dm @@ -3077,25 +3084,12 @@ def issue_warning(*args, **kw): def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` + """ + Yield ``Requirement`` objects for each specification in `strs`. `strs` must be a string, or a (possibly-nested) iterable thereof. 
""" - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - for line in lines: - # Drop comments -- a hash without a space may be in a URL. - if ' #' in line: - line = line[:line.find(' #')] - # If there is a line continuation, drop it, and append the next line. - if line.endswith('\\'): - line = line[:-2].strip() - try: - line += next(lines) - except StopIteration: - return - yield Requirement(line) + return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs)))) class RequirementParseError(packaging.requirements.InvalidRequirement): diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..c605ec26 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/DESCRIPTION.rst @@ -0,0 +1,227 @@ + +.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png + :target: http://travis-ci.org/ActiveState/appdirs + +the problem +=========== + +What directory should your app use for storing user data? If running on Mac OS X, you +should use:: + + ~/Library/Application Support/<AppName> + +If on Windows (at least English Win XP) that should be:: + + C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName> + +or possibly:: + + C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName> + +for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story. + +On Linux (and other Unices) the dir, according to the `XDG +spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is:: + + ~/.local/share/<AppName> + + +``appdirs`` to the rescue +========================= + +This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will +help you choose an appropriate: + +- user data dir (``user_data_dir``) +- user config dir (``user_config_dir``) +- user cache dir (``user_cache_dir``) +- site data dir (``site_data_dir``) +- site config dir (``site_config_dir``) +- user log dir (``user_log_dir``) + +and also: + +- is a single module so other Python packages can include their own private copy +- is slightly opinionated on the directory names used. Look for "OPINION" in + documentation and code for when an opinion is being applied. 
+ + +some example output +=================== + +On Mac OS X:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/Users/trentm/Library/Application Support/SuperApp' + >>> site_data_dir(appname, appauthor) + '/Library/Application Support/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/Users/trentm/Library/Caches/SuperApp' + >>> user_log_dir(appname, appauthor) + '/Users/trentm/Library/Logs/SuperApp' + +On Windows 7:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' + >>> user_data_dir(appname, appauthor, roaming=True) + 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' + >>> user_cache_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' + >>> user_log_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' + +On Linux:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/home/trentm/.local/share/SuperApp + >>> site_data_dir(appname, appauthor) + '/usr/local/share/SuperApp' + >>> site_data_dir(appname, appauthor, multipath=True) + '/usr/local/share/SuperApp:/usr/share/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp' + >>> user_log_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp/log' + >>> user_config_dir(appname) + '/home/trentm/.config/SuperApp' + >>> site_config_dir(appname) + '/etc/xdg/SuperApp' + >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' + >>> site_config_dir(appname, multipath=True) + '/etc/SuperApp:/usr/local/etc/SuperApp' + + +``AppDirs`` for convenience +=========================== + +:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp' + + + +Per-version isolation +===================== + +If you have multiple versions of your app in use that you want to be +able to run side-by-side, then you may want version-isolation for these +dirs:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp/1.0' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp/1.0' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp/1.0' + + + +appdirs Changelog +================= + +appdirs 1.4.3 +------------- +- [PR #76] Python 3.6 invalid escape sequence deprecation fixes +- Fix for Python 3.6 support + +appdirs 1.4.2 +------------- +- [PR #84] Allow installing without setuptools +- [PR #86] Fix string delimiters in setup.py description +- Add Python 3.6 support + +appdirs 1.4.1 +------------- +- [issue #38] Fix _winreg import on Windows Py3 +- [issue #55] Make appname optional + +appdirs 1.4.0 +------------- +- [PR #42] AppAuthor is now optional on Windows +- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows + support requires `JNA <https://github.com/twall/jna>`_. 
+- [PR #44] Fix incorrect behaviour of the site_config_dir method + +appdirs 1.3.0 +------------- +- [Unix, issue 16] Conform to XDG standard, instead of breaking it for + everybody +- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are + usually case sensitive, so mangling is not wise +- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result + based on XDG_DATA_DIRS and make room for respecting the standard which + specifies XDG_DATA_DIRS is a multiple-value variable +- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to + XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` + +appdirs 1.2.0 +------------- + +- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more + typical. +- [issue 9] Make ``unicode`` work on py3k. + +appdirs 1.1.0 +------------- + +- [issue 4] Add ``AppDirs.user_log_dir``. +- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec + <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. +- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. +- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports + Python3 now. +- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use + ``opinion=False`` option to disable this. +- Add ``appdirs.AppDirs`` convenience class. Usage: + + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + +- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short + paths if there are high bit chars. +- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. + "~/.superapp/cache". +- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) + and change the default ``user_data_dir`` behaviour to use a *non*-roaming + profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because + a large roaming profile can cause login speed issues. The "only syncs on + logout" behaviour can cause surprises in appdata info. + + +appdirs 1.0.1 (never released) +------------------------------ + +Started this changelog 27 July 2010. Before that this module originated in the +`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then +as `applib/location.py +<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by +`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython +<http://www.activestate.com/activepython>`_). This is basically a fork of +applib.py 1.0.1 and applib/location.py 1.0.1. + + + diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/INSTALLER b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA new file mode 100644 index 00000000..69ddf934 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/METADATA @@ -0,0 +1,254 @@ +Metadata-Version: 2.0 +Name: appdirs +Version: 1.4.3 +Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". 
+Home-page: http://github.com/ActiveState/appdirs +Author: Trent Mick; Sridhar Ratnakumar; Jeff Rouse +Author-email: trentm@gmail.com; github@srid.name; jr@its.to +License: MIT +Keywords: application directory log cache user +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Software Development :: Libraries :: Python Modules + + +.. image:: https://secure.travis-ci.org/ActiveState/appdirs.png + :target: http://travis-ci.org/ActiveState/appdirs + +the problem +=========== + +What directory should your app use for storing user data? If running on Mac OS X, you +should use:: + + ~/Library/Application Support/<AppName> + +If on Windows (at least English Win XP) that should be:: + + C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName> + +or possibly:: + + C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName> + +for `roaming profiles <http://bit.ly/9yl3b6>`_ but that is another story. + +On Linux (and other Unices) the dir, according to the `XDG +spec <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is:: + + ~/.local/share/<AppName> + + +``appdirs`` to the rescue +========================= + +This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will +help you choose an appropriate: + +- user data dir (``user_data_dir``) +- user config dir (``user_config_dir``) +- user cache dir (``user_cache_dir``) +- site data dir (``site_data_dir``) +- site config dir (``site_config_dir``) +- user log dir (``user_log_dir``) + +and also: + +- is a single module so other Python packages can include their own private copy +- is slightly opinionated on the directory names used. Look for "OPINION" in + documentation and code for when an opinion is being applied. 
+ + +some example output +=================== + +On Mac OS X:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/Users/trentm/Library/Application Support/SuperApp' + >>> site_data_dir(appname, appauthor) + '/Library/Application Support/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/Users/trentm/Library/Caches/SuperApp' + >>> user_log_dir(appname, appauthor) + '/Users/trentm/Library/Logs/SuperApp' + +On Windows 7:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp' + >>> user_data_dir(appname, appauthor, roaming=True) + 'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp' + >>> user_cache_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache' + >>> user_log_dir(appname, appauthor) + 'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs' + +On Linux:: + + >>> from appdirs import * + >>> appname = "SuperApp" + >>> appauthor = "Acme" + >>> user_data_dir(appname, appauthor) + '/home/trentm/.local/share/SuperApp + >>> site_data_dir(appname, appauthor) + '/usr/local/share/SuperApp' + >>> site_data_dir(appname, appauthor, multipath=True) + '/usr/local/share/SuperApp:/usr/share/SuperApp' + >>> user_cache_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp' + >>> user_log_dir(appname, appauthor) + '/home/trentm/.cache/SuperApp/log' + >>> user_config_dir(appname) + '/home/trentm/.config/SuperApp' + >>> site_config_dir(appname) + '/etc/xdg/SuperApp' + >>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc' + >>> site_config_dir(appname, multipath=True) + '/etc/SuperApp:/usr/local/etc/SuperApp' + + +``AppDirs`` for convenience +=========================== + +:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp' + + + +Per-version isolation +===================== + +If you have multiple versions of your app in use that you want to be +able to run side-by-side, then you may want version-isolation for these +dirs:: + + >>> from appdirs import AppDirs + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + >>> dirs.site_data_dir + '/Library/Application Support/SuperApp/1.0' + >>> dirs.user_cache_dir + '/Users/trentm/Library/Caches/SuperApp/1.0' + >>> dirs.user_log_dir + '/Users/trentm/Library/Logs/SuperApp/1.0' + + + +appdirs Changelog +================= + +appdirs 1.4.3 +------------- +- [PR #76] Python 3.6 invalid escape sequence deprecation fixes +- Fix for Python 3.6 support + +appdirs 1.4.2 +------------- +- [PR #84] Allow installing without setuptools +- [PR #86] Fix string delimiters in setup.py description +- Add Python 3.6 support + +appdirs 1.4.1 +------------- +- [issue #38] Fix _winreg import on Windows Py3 +- [issue #55] Make appname optional + +appdirs 1.4.0 +------------- +- [PR #42] AppAuthor is now optional on Windows +- [issue 41] Support Jython on Windows, Mac, and Unix-like platforms. Windows + support requires `JNA <https://github.com/twall/jna>`_. 
+- [PR #44] Fix incorrect behaviour of the site_config_dir method + +appdirs 1.3.0 +------------- +- [Unix, issue 16] Conform to XDG standard, instead of breaking it for + everybody +- [Unix] Removes gratuitous case mangling of the case, since \*nix-es are + usually case sensitive, so mangling is not wise +- [Unix] Fixes the utterly wrong behaviour in ``site_data_dir``, return result + based on XDG_DATA_DIRS and make room for respecting the standard which + specifies XDG_DATA_DIRS is a multiple-value variable +- [Issue 6] Add ``*_config_dir`` which are distinct on nix-es, according to + XDG specs; on Windows and Mac return the corresponding ``*_data_dir`` + +appdirs 1.2.0 +------------- + +- [Unix] Put ``user_log_dir`` under the *cache* dir on Unix. Seems to be more + typical. +- [issue 9] Make ``unicode`` work on py3k. + +appdirs 1.1.0 +------------- + +- [issue 4] Add ``AppDirs.user_log_dir``. +- [Unix, issue 2, issue 7] appdirs now conforms to `XDG base directory spec + <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. +- [Mac, issue 5] Fix ``site_data_dir()`` on Mac. +- [Mac] Drop use of 'Carbon' module in favour of hardcoded paths; supports + Python3 now. +- [Windows] Append "Cache" to ``user_cache_dir`` on Windows by default. Use + ``opinion=False`` option to disable this. +- Add ``appdirs.AppDirs`` convenience class. Usage: + + >>> dirs = AppDirs("SuperApp", "Acme", version="1.0") + >>> dirs.user_data_dir + '/Users/trentm/Library/Application Support/SuperApp/1.0' + +- [Windows] Cherry-pick Komodo's change to downgrade paths to the Windows short + paths if there are high bit chars. +- [Linux] Change default ``user_cache_dir()`` on Linux to be singular, e.g. + "~/.superapp/cache". +- [Windows] Add ``roaming`` option to ``user_data_dir()`` (for use on Windows only) + and change the default ``user_data_dir`` behaviour to use a *non*-roaming + profile dir (``CSIDL_LOCAL_APPDATA`` instead of ``CSIDL_APPDATA``). Why? Because + a large roaming profile can cause login speed issues. The "only syncs on + logout" behaviour can cause surprises in appdata info. + + +appdirs 1.0.1 (never released) +------------------------------ + +Started this changelog 27 July 2010. Before that this module originated in the +`Komodo <http://www.activestate.com/komodo>`_ product as ``applib.py`` and then +as `applib/location.py +<http://github.com/ActiveState/applib/blob/master/applib/location.py>`_ (used by +`PyPM <http://code.activestate.com/pypm/>`_ in `ActivePython +<http://www.activestate.com/activepython>`_). This is basically a fork of +applib.py 1.0.1 and applib/location.py 1.0.1. + + + diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD new file mode 100644 index 00000000..3f45ff59 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/RECORD @@ -0,0 +1,10 @@ +__pycache__/appdirs.cpython-310.pyc,,
+appdirs-1.4.3.dist-info/DESCRIPTION.rst,sha256=77Fe8OIOLSjDSNdLiL5xywMKO-AGE42rdXkqKo4Ee-k,7531
+appdirs-1.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+appdirs-1.4.3.dist-info/METADATA,sha256=3IFw6jTfImdOqsCb2GYvVR157tL7KEzfRAszn382csk,8773
+appdirs-1.4.3.dist-info/RECORD,,
+appdirs-1.4.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+appdirs-1.4.3.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110
+appdirs-1.4.3.dist-info/metadata.json,sha256=fL_Q-GuFJu3PJxMrwU7SdsI8RGqjIfi2AvouCSF5DSA,1359
+appdirs-1.4.3.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8
+appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701
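A minimal sketch of the ``appdirs`` API documented in the vendored description and metadata above (assuming a standalone ``appdirs`` install; within setuptools the same module is reached as ``pkg_resources.extern.appdirs``)::

    from appdirs import AppDirs, user_cache_dir

    # AppDirs bundles the individual *_dir helpers behind one object.
    # "SuperApp"/"Acme" are the example names used in the description above.
    dirs = AppDirs("SuperApp", "Acme", version="1.0")
    print(dirs.user_data_dir)   # e.g. '/Users/trentm/Library/Application Support/SuperApp/1.0' on macOS
    print(dirs.user_cache_dir)  # e.g. '/Users/trentm/Library/Caches/SuperApp/1.0' on macOS

    # The function form takes the same appname/appauthor arguments.
    print(user_cache_dir("SuperApp", "Acme"))  # e.g. '/home/trentm/.cache/SuperApp' on Linux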
diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/REQUESTED b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL new file mode 100644 index 00000000..8b6dd1b5 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json new file mode 100644 index 00000000..da1e5f3a --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "trentm@gmail.com; github@srid.name; jr@its.to", "name": "Trent Mick; Sridhar Ratnakumar; Jeff Rouse", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/ActiveState/appdirs"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["application", "directory", "log", "cache", "user"], "license": "MIT", "metadata_version": "2.0", "name": "appdirs", "summary": "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\".", "test_requires": [{"requires": []}], "version": "1.4.3"}
\ No newline at end of file diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt new file mode 100644 index 00000000..d64bc321 --- /dev/null +++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt @@ -0,0 +1 @@ +appdirs diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE new file mode 100644 index 00000000..378b991a --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Brett Cannon, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA new file mode 100644 index 00000000..cdb1e783 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/METADATA @@ -0,0 +1,86 @@ +Metadata-Version: 2.1 +Name: importlib-resources +Version: 5.4.0 +Summary: Read resources from Python packages +Home-page: https://github.com/python/importlib_resources +Author: Barry Warsaw +Author-email: barry@python.org +License: UNKNOWN +Project-URL: Documentation, https://importlib-resources.readthedocs.io/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_resources.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/importlib_resources + +.. 
image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg + :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest + :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + +``importlib_resources`` is a backport of Python standard library +`importlib.resources +<https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_ +module for older Pythons. + +The key goal of this module is to replace parts of `pkg_resources +<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a +solution in Python's stdlib that relies on well-defined APIs. This makes +reading resources included in packages easier, with more stable and consistent +semantics. + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_resources + - stdlib + * - 5.2 + - 3.11 + * - 5.0 + - 3.10 + * - 1.3 + - 3.9 + * - 0.5 (?) + - 3.7 + + diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD new file mode 100644 index 00000000..7a68a2f2 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/RECORD @@ -0,0 +1,75 @@ +importlib_resources-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_resources-5.4.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
+importlib_resources-5.4.0.dist-info/METADATA,sha256=i5jH25IbM0Ls6u6UzSSCOa0c8hpDvePxqgnQwh2T5Io,3135
+importlib_resources-5.4.0.dist-info/RECORD,,
+importlib_resources-5.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources-5.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+importlib_resources-5.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources/__init__.py,sha256=zuA0lbRgtVVCcAztM0z5LuBiOCV9L_3qtI6mW2p5xAg,525
+importlib_resources/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-310.pyc,,
+importlib_resources/__pycache__/_common.cpython-310.pyc,,
+importlib_resources/__pycache__/_compat.cpython-310.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-310.pyc,,
+importlib_resources/__pycache__/_legacy.cpython-310.pyc,,
+importlib_resources/__pycache__/abc.cpython-310.pyc,,
+importlib_resources/__pycache__/readers.cpython-310.pyc,,
+importlib_resources/__pycache__/simple.cpython-310.pyc,,
+importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
+importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741
+importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704
+importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
+importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494
+importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566
+importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/_compat.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/update-zips.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-310.pyc,,
+importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260
+importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
+importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184
+importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565
+importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103
+importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408
+importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286
+importlib_resources/tests/test_resource.py,sha256=DqfLNc9kaN5obqxU8kn0sRUWMf9MygagrpfMV5-QfWg,8145
+importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415
+importlib_resources/tests/util.py,sha256=X1j-0C96pu3_tmtJuLhzfBfcfMenOphDLkxtCt5j7t4,5309
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
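The package metadata above presents ``importlib_resources`` as a ``files()``-based replacement for parts of ``pkg_resources``; a minimal sketch of that API follows (``mypkg`` and ``data.txt`` are hypothetical names, and the standalone ``importlib_resources`` distribution is assumed rather than this vendored copy)::

    import importlib_resources as resources

    # files() returns a Traversable rooted at the package; resources are
    # reached with joinpath() or "/" and can be read directly, even when
    # the package is imported from a zip archive.
    text = resources.files("mypkg").joinpath("data.txt").read_text(encoding="utf-8")

    # as_file() yields a real filesystem path (a temporary copy for zipped
    # packages) for APIs that insist on one.
    with resources.as_file(resources.files("mypkg") / "data.txt") as path:
        print(path)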
diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..58ad1bd3 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_resources diff --git a/pkg_resources/_vendor/importlib_resources/__init__.py b/pkg_resources/_vendor/importlib_resources/__init__.py new file mode 100644 index 00000000..34e3a995 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/__init__.py @@ -0,0 +1,36 @@ +"""Read resources contained within a package.""" + +from ._common import ( + as_file, + files, + Package, +) + +from ._legacy import ( + contents, + open_binary, + read_binary, + open_text, + read_text, + is_resource, + path, + Resource, +) + +from .abc import ResourceReader + + +__all__ = [ + 'Package', + 'Resource', + 'ResourceReader', + 'as_file', + 'contents', + 'files', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', +] diff --git a/pkg_resources/_vendor/importlib_resources/_adapters.py b/pkg_resources/_vendor/importlib_resources/_adapters.py new file mode 100644 index 00000000..ea363d86 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/_adapters.py @@ -0,0 +1,170 @@ +from contextlib import suppress +from io import TextIOWrapper + +from . import abc + + +class SpecLoaderAdapter: + """ + Adapt a package spec to adapt the underlying loader. + """ + + def __init__(self, spec, adapter=lambda spec: spec.loader): + self.spec = spec + self.loader = adapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class TraversableResourcesLoader: + """ + Adapt a loader to provide TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + def get_resource_reader(self, name): + return CompatibilityFiles(self.spec)._native() + + +def _io_wrapper(file, mode='r', *args, **kwargs): + if mode == 'r': + return TextIOWrapper(file, *args, **kwargs) + elif mode == 'rb': + return file + raise ValueError( + "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) + ) + + +class CompatibilityFiles: + """ + Adapter for an existing or non-existent resource reader + to provide a compatibility .files(). + """ + + class SpecPath(abc.Traversable): + """ + Path tied to a module spec. + Can be read and exposes the resource reader children. 
+ """ + + def __init__(self, spec, reader): + self._spec = spec + self._reader = reader + + def iterdir(self): + if not self._reader: + return iter(()) + return iter( + CompatibilityFiles.ChildPath(self._reader, path) + for path in self._reader.contents() + ) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + if not self._reader: + return CompatibilityFiles.OrphanPath(other) + return CompatibilityFiles.ChildPath(self._reader, other) + + @property + def name(self): + return self._spec.name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) + + class ChildPath(abc.Traversable): + """ + Path tied to a resource reader child. + Can be read but doesn't expose any meaningful children. + """ + + def __init__(self, reader, name): + self._reader = reader + self._name = name + + def iterdir(self): + return iter(()) + + def is_file(self): + return self._reader.is_resource(self.name) + + def is_dir(self): + return not self.is_file() + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(self.name, other) + + @property + def name(self): + return self._name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper( + self._reader.open_resource(self.name), mode, *args, **kwargs + ) + + class OrphanPath(abc.Traversable): + """ + Orphan path, not tied to a module spec or resource reader. + Can't be read and doesn't expose any meaningful children. + """ + + def __init__(self, *path_parts): + if len(path_parts) < 1: + raise ValueError('Need at least one path part to construct a path') + self._path = path_parts + + def iterdir(self): + return iter(()) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(*self._path, other) + + @property + def name(self): + return self._path[-1] + + def open(self, mode='r', *args, **kwargs): + raise FileNotFoundError("Can't open orphan path") + + def __init__(self, spec): + self.spec = spec + + @property + def _reader(self): + with suppress(AttributeError): + return self.spec.loader.get_resource_reader(self.spec.name) + + def _native(self): + """ + Return the native reader if it supports files(). + """ + reader = self._reader + return reader if hasattr(reader, 'files') else self + + def __getattr__(self, attr): + return getattr(self._reader, attr) + + def files(self): + return CompatibilityFiles.SpecPath(self.spec, self._reader) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + """ + return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/pkg_resources/_vendor/importlib_resources/_common.py b/pkg_resources/_vendor/importlib_resources/_common.py new file mode 100644 index 00000000..a12e2c75 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/_common.py @@ -0,0 +1,104 @@ +import os +import pathlib +import tempfile +import functools +import contextlib +import types +import importlib + +from typing import Union, Optional +from .abc import ResourceReader, Traversable + +from ._compat import wrap_spec + +Package = Union[types.ModuleType, str] + + +def files(package): + # type: (Package) -> Traversable + """ + Get a Traversable resource from a package + """ + return from_package(get_package(package)) + + +def get_resource_reader(package): + # type: (types.ModuleType) -> Optional[ResourceReader] + """ + Return the package's loader if it's a ResourceReader. 
+ """ + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore + if reader is None: + return None + return reader(spec.name) # type: ignore + + +def resolve(cand): + # type: (Package) -> types.ModuleType + return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) + + +def get_package(package): + # type: (Package) -> types.ModuleType + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. + """ + resolved = resolve(package) + if wrap_spec(resolved).submodule_search_locations is None: + raise TypeError(f'{package!r} is not a package') + return resolved + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = wrap_spec(package) + reader = spec.loader.get_resource_reader(spec.name) + return reader.files() + + +@contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. + fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + try: + os.write(fd, reader()) + finally: + os.close(fd) + del reader + yield pathlib.Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +@functools.singledispatch +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + return _tempfile(path.read_bytes, suffix=path.name) + + +@as_file.register(pathlib.Path) +@contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. + """ + yield path diff --git a/pkg_resources/_vendor/importlib_resources/_compat.py b/pkg_resources/_vendor/importlib_resources/_compat.py new file mode 100644 index 00000000..cb9fc820 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/_compat.py @@ -0,0 +1,98 @@ +# flake8: noqa + +import abc +import sys +import pathlib +from contextlib import suppress + +if sys.version_info >= (3, 10): + from zipfile import Path as ZipPath # type: ignore +else: + from ..zipp import Path as ZipPath # type: ignore + + +try: + from typing import runtime_checkable # type: ignore +except ImportError: + + def runtime_checkable(cls): # type: ignore + return cls + + +try: + from typing import Protocol # type: ignore +except ImportError: + Protocol = abc.ABC # type: ignore + + +class TraversableResourcesLoader: + """ + Adapt loaders to provide TraversableResources and other + compatibility. + + Used primarily for Python 3.9 and earlier where the native + loaders do not yet implement TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + @property + def path(self): + return self.spec.origin + + def get_resource_reader(self, name): + from . 
import readers, _adapters + + def _zip_reader(spec): + with suppress(AttributeError): + return readers.ZipReader(spec.loader, spec.name) + + def _namespace_reader(spec): + with suppress(AttributeError, ValueError): + return readers.NamespaceReader(spec.submodule_search_locations) + + def _available_reader(spec): + with suppress(AttributeError): + return spec.loader.get_resource_reader(spec.name) + + def _native_reader(spec): + reader = _available_reader(spec) + return reader if hasattr(reader, 'files') else None + + def _file_reader(spec): + try: + path = pathlib.Path(self.path) + except TypeError: + return None + if path.exists(): + return readers.FileReader(self) + + return ( + # native reader if it supplies 'files' + _native_reader(self.spec) + or + # local ZipReader if a zip module + _zip_reader(self.spec) + or + # local NamespaceReader if a namespace module + _namespace_reader(self.spec) + or + # local FileReader + _file_reader(self.spec) + # fallback - adapt the spec ResourceReader to TraversableReader + or _adapters.CompatibilityFiles(self.spec) + ) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + + Supersedes _adapters.wrap_spec to use TraversableResourcesLoader + from above for older Python compatibility (<3.10). + """ + from . import _adapters + + return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/pkg_resources/_vendor/importlib_resources/_itertools.py b/pkg_resources/_vendor/importlib_resources/_itertools.py new file mode 100644 index 00000000..cce05582 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/_itertools.py @@ -0,0 +1,35 @@ +from itertools import filterfalse + +from typing import ( + Callable, + Iterable, + Iterator, + Optional, + Set, + TypeVar, + Union, +) + +# Type and type variable definitions +_T = TypeVar('_T') +_U = TypeVar('_U') + + +def unique_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None +) -> Iterator[_T]: + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen: Set[Union[_T, _U]] = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element diff --git a/pkg_resources/_vendor/importlib_resources/_legacy.py b/pkg_resources/_vendor/importlib_resources/_legacy.py new file mode 100644 index 00000000..1d5d3f1f --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/_legacy.py @@ -0,0 +1,121 @@ +import functools +import os +import pathlib +import types +import warnings + +from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any + +from . import _common + +Package = Union[types.ModuleType, str] +Resource = str + + +def deprecated(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + f"{func.__name__} is deprecated. Use files() instead. " + "Refer to https://importlib-resources.readthedocs.io" + "/en/latest/using.html#migrating-from-legacy for migration advice.", + DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. 
+ """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError(f'{path!r} must be only a file name') + return file_name + + +@deprecated +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open('rb') + + +@deprecated +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + return (_common.files(package) / normalize_path(resource)).read_bytes() + + +@deprecated +def open_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open( + 'r', encoding=encoding, errors=errors + ) + + +@deprecated +def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@deprecated +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + return [path.name for path in _common.files(package).iterdir()] + + +@deprecated +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + resource = normalize_path(name) + return any( + traversable.name == resource and traversable.is_file() + for traversable in _common.files(package).iterdir() + ) + + +@deprecated +def path( + package: Package, + resource: Resource, +) -> ContextManager[pathlib.Path]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + return _common.as_file(_common.files(package) / normalize_path(resource)) diff --git a/pkg_resources/_vendor/importlib_resources/abc.py b/pkg_resources/_vendor/importlib_resources/abc.py new file mode 100644 index 00000000..d39dc1ad --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/abc.py @@ -0,0 +1,137 @@ +import abc +from typing import BinaryIO, Iterable, Text + +from ._compat import runtime_checkable, Protocol + + +class ResourceReader(metaclass=abc.ABCMeta): + """Abstract base class for loaders to provide resource reading support.""" + + @abc.abstractmethod + def open_resource(self, resource: Text) -> BinaryIO: + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. 
+ raise FileNotFoundError + + @abc.abstractmethod + def resource_path(self, resource: Text) -> Text: + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def is_resource(self, path: Text) -> bool: + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. + """ + raise FileNotFoundError + + @abc.abstractmethod + def contents(self) -> Iterable[str]: + """Return an iterable of entries in `package`.""" + raise FileNotFoundError + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + """ + + @abc.abstractmethod + def iterdir(self): + """ + Yield Traversable objects in self + """ + + def read_bytes(self): + """ + Read contents of self as bytes + """ + with self.open('rb') as strm: + return strm.read() + + def read_text(self, encoding=None): + """ + Read contents of self as text + """ + with self.open(encoding=encoding) as strm: + return strm.read() + + @abc.abstractmethod + def is_dir(self) -> bool: + """ + Return True if self is a directory + """ + + @abc.abstractmethod + def is_file(self) -> bool: + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, child): + """ + Return Traversable child in self + """ + + def __truediv__(self, child): + """ + Return Traversable child in self + """ + return self.joinpath(child) + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self) -> str: + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + """ + The required interface for providing traversable + resources. + """ + + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).is_file() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/pkg_resources/_vendor/importlib_resources/py.typed b/pkg_resources/_vendor/importlib_resources/py.typed new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/py.typed diff --git a/pkg_resources/_vendor/importlib_resources/readers.py b/pkg_resources/_vendor/importlib_resources/readers.py new file mode 100644 index 00000000..f1190ca4 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/readers.py @@ -0,0 +1,122 @@ +import collections +import pathlib +import operator + +from . 
import abc + +from ._itertools import unique_everseen +from ._compat import ZipPath + + +def remove_duplicates(items): + return iter(collections.OrderedDict.fromkeys(items)) + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = pathlib.Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.archive = loader.archive + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. + target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return ZipPath(self.archive, self.prefix) + + +class MultiplexedPath(abc.Traversable): + """ + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. + """ + + def __init__(self, *paths): + self._paths = list(map(pathlib.Path, remove_duplicates(paths))) + if not self._paths: + message = 'MultiplexedPath must contain at least one path' + raise FileNotFoundError(message) + if not all(path.is_dir() for path in self._paths): + raise NotADirectoryError('MultiplexedPath only supports directories') + + def iterdir(self): + files = (file for path in self._paths for file in path.iterdir()) + return unique_everseen(files, key=operator.attrgetter('name')) + + def read_bytes(self): + raise FileNotFoundError(f'{self} is not a file') + + def read_text(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + def is_dir(self): + return True + + def is_file(self): + return False + + def joinpath(self, child): + # first try to find child in current paths + for file in self.iterdir(): + if file.name == child: + return file + # if it does not exist, construct it with the first path + return self._paths[0] / child + + __truediv__ = joinpath + + def open(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + @property + def name(self): + return self._paths[0].name + + def __repr__(self): + paths = ', '.join(f"'{path}'" for path in self._paths) + return f'MultiplexedPath({paths})' + + +class NamespaceReader(abc.TraversableResources): + def __init__(self, namespace_path): + if 'NamespacePath' not in str(namespace_path): + raise ValueError('Invalid path') + self.path = MultiplexedPath(*list(namespace_path)) + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path diff --git a/pkg_resources/_vendor/importlib_resources/simple.py b/pkg_resources/_vendor/importlib_resources/simple.py new file mode 100644 index 00000000..da073cbd --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/simple.py @@ -0,0 +1,116 @@ +""" +Interface adapters for low-level readers. 
+""" + +import abc +import io +import itertools +from typing import BinaryIO, List + +from .abc import Traversable, TraversableResources + + +class SimpleReader(abc.ABC): + """ + The minimum, low-level interface required from a resource + provider. + """ + + @abc.abstractproperty + def package(self): + # type: () -> str + """ + The name of the package for which this reader loads resources. + """ + + @abc.abstractmethod + def children(self): + # type: () -> List['SimpleReader'] + """ + Obtain an iterable of SimpleReader for available + child containers (e.g. directories). + """ + + @abc.abstractmethod + def resources(self): + # type: () -> List[str] + """ + Obtain available named resources for this virtual package. + """ + + @abc.abstractmethod + def open_binary(self, resource): + # type: (str) -> BinaryIO + """ + Obtain a File-like for a named resource. + """ + + @property + def name(self): + return self.package.split('.')[-1] + + +class ResourceHandle(Traversable): + """ + Handle to a named resource in a ResourceReader. + """ + + def __init__(self, parent, name): + # type: (ResourceContainer, str) -> None + self.parent = parent + self.name = name # type: ignore + + def is_file(self): + return True + + def is_dir(self): + return False + + def open(self, mode='r', *args, **kwargs): + stream = self.parent.reader.open_binary(self.name) + if 'b' not in mode: + stream = io.TextIOWrapper(*args, **kwargs) + return stream + + def joinpath(self, name): + raise RuntimeError("Cannot traverse into a resource") + + +class ResourceContainer(Traversable): + """ + Traversable container for a package's resources via its reader. + """ + + def __init__(self, reader): + # type: (SimpleReader) -> None + self.reader = reader + + def is_dir(self): + return True + + def is_file(self): + return False + + def iterdir(self): + files = (ResourceHandle(self, name) for name in self.reader.resources) + dirs = map(ResourceContainer, self.reader.children()) + return itertools.chain(files, dirs) + + def open(self, *args, **kwargs): + raise IsADirectoryError() + + def joinpath(self, name): + return next( + traversable for traversable in self.iterdir() if traversable.name == name + ) + + +class TraversableReader(TraversableResources, SimpleReader): + """ + A TraversableResources based on SimpleReader. Resource providers + may derive from this class to provide the TraversableResources + interface by supplying the SimpleReader interface. 
+ """ + + def files(self): + return ResourceContainer(self) diff --git a/pkg_resources/_vendor/importlib_resources/tests/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/_compat.py b/pkg_resources/_vendor/importlib_resources/tests/_compat.py new file mode 100644 index 00000000..4c99cffd --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/_compat.py @@ -0,0 +1,19 @@ +import os + + +try: + from test.support import import_helper # type: ignore +except ImportError: + # Python 3.9 and earlier + class import_helper: # type: ignore + from test.support import modules_setup, modules_cleanup + + +try: + # Python 3.10 + from test.support.os_helper import unlink +except ImportError: + from test.support import unlink as _unlink + + def unlink(target): + return _unlink(os.fspath(target)) diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/binary.file b/pkg_resources/_vendor/importlib_resources/tests/data01/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/binary.file diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file Binary files differnew file mode 100644 index 00000000..2cb77229 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! 
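The vendored modules above (abc.py, readers.py, simple.py) back the Traversable-based files() API that supersedes the deprecated open_*/read_*/path helpers seen earlier in this diff. A minimal usage sketch follows, assuming a hypothetical package `mypkg` that ships a `data.txt` resource (both names are illustrative and not part of this change):

    from importlib_resources import files, as_file

    # Obtain a Traversable handle; works for on-disk, zipped and
    # namespace packages alike ('mypkg'/'data.txt' are hypothetical).
    resource = files('mypkg').joinpath('data.txt')
    text = resource.read_text(encoding='utf-8')

    # Materialize a real filesystem path only when an API demands one;
    # as_file() extracts a temporary copy for zipped packages and
    # removes it when the context exits.
    with as_file(resource) as path:
        data = path.read_bytes()
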
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt new file mode 100644 index 00000000..61a813e4 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt @@ -0,0 +1 @@ +one resource diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt new file mode 100644 index 00000000..a80ce46e --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt @@ -0,0 +1 @@ +two resource diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file Binary files differnew file mode 100644 index 00000000..2cb77229 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py new file mode 100644 index 00000000..d92c7c56 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py @@ -0,0 +1,102 @@ +import io +import unittest + +import importlib_resources as resources + +from importlib_resources._adapters import ( + CompatibilityFiles, + wrap_spec, +) + +from . 
import util + + +class CompatibilityFilesTests(unittest.TestCase): + @property + def package(self): + bytes_data = io.BytesIO(b'Hello, world!') + return util.create_package( + file=bytes_data, + path='some_path', + contents=('a', 'b', 'c'), + ) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_iter(self): + self.assertEqual( + sorted(path.name for path in self.files.iterdir()), + ['a', 'b', 'c'], + ) + + def test_child_path_iter(self): + self.assertEqual(list((self.files / 'a').iterdir()), []) + + def test_orphan_path_iter(self): + self.assertEqual(list((self.files / 'a' / 'a').iterdir()), []) + self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), []) + + def test_spec_path_is(self): + self.assertFalse(self.files.is_file()) + self.assertFalse(self.files.is_dir()) + + def test_child_path_is(self): + self.assertTrue((self.files / 'a').is_file()) + self.assertFalse((self.files / 'a').is_dir()) + + def test_orphan_path_is(self): + self.assertFalse((self.files / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a').is_dir()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir()) + + def test_spec_path_name(self): + self.assertEqual(self.files.name, 'testingpackage') + + def test_child_path_name(self): + self.assertEqual((self.files / 'a').name, 'a') + + def test_orphan_path_name(self): + self.assertEqual((self.files / 'a' / 'b').name, 'b') + self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c') + + def test_spec_path_open(self): + self.assertEqual(self.files.read_bytes(), b'Hello, world!') + self.assertEqual(self.files.read_text(), 'Hello, world!') + + def test_child_path_open(self): + self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!') + self.assertEqual((self.files / 'a').read_text(), 'Hello, world!') + + def test_orphan_path_open(self): + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b').read_bytes() + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b' / 'c').read_bytes() + + def test_open_invalid_mode(self): + with self.assertRaises(ValueError): + self.files.open('0') + + def test_orphan_path_invalid(self): + with self.assertRaises(ValueError): + CompatibilityFiles.OrphanPath() + + def test_wrap_spec(self): + spec = wrap_spec(self.package) + self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles) + + +class CompatibilityFilesNoReaderTests(unittest.TestCase): + @property + def package(self): + return util.create_package_from_loader(None) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_joinpath(self): + self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath) diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py new file mode 100644 index 00000000..525568e8 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py @@ -0,0 +1,43 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . 
import util + + +class ContentsTests: + expected = { + '__init__.py', + 'binary.file', + 'subdirectory', + 'utf-16.file', + 'utf-8.file', + } + + def test_contents(self): + contents = {path.name for path in resources.files(self.data).iterdir()} + assert self.expected <= contents + + +class ContentsDiskTests(ContentsTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): + pass + + +class ContentsNamespaceTests(ContentsTests, unittest.TestCase): + expected = { + # no __init__ because of namespace design + # no subdirectory as incidental difference in fixture + 'binary.file', + 'utf-16.file', + 'utf-8.file', + } + + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py new file mode 100644 index 00000000..2676b49e --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_files.py @@ -0,0 +1,46 @@ +import typing +import unittest + +import importlib_resources as resources +from importlib_resources.abc import Traversable +from . import data01 +from . import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text(encoding='utf-8') + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +class OpenNamespaceTests(FilesTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_open.py b/pkg_resources/_vendor/importlib_resources/tests/test_open.py new file mode 100644 index 00000000..87b42c3d --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_open.py @@ -0,0 +1,81 @@ +import unittest + +import importlib_resources as resources +from . import data01 +from . 
import util + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open('rb'): + pass + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open(): + pass + + +class OpenTests: + def test_open_binary(self): + target = resources.files(self.data) / 'binary.file' + with target.open('rb') as fp: + result = fp.read() + self.assertEqual(result, b'\x00\x01\x02\x03') + + def test_open_text_default_encoding(self): + target = resources.files(self.data) / 'utf-8.file' + with target.open() as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_open_text_given_encoding(self): + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-16', errors='strict') as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_open_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-8', errors='strict') as fp: + self.assertRaises(UnicodeError, fp.read) + with target.open(encoding='utf-8', errors='ignore') as fp: + result = fp.read() + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + def test_open_binary_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open, 'rb') + + def test_open_text_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open) + + +class OpenDiskTests(OpenTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + +class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_path.py b/pkg_resources/_vendor/importlib_resources/tests/test_path.py new file mode 100644 index 00000000..4f4d3943 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_path.py @@ -0,0 +1,64 @@ +import io +import unittest + +import importlib_resources as resources +from . import data01 +from . import util + + +class CommonTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + with resources.as_file(resources.files(package).joinpath(path)): + pass + + +class PathTests: + def test_reading(self): + # Path should be readable. + # Test also implicitly verifies the returned object is a pathlib.Path + # instance. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + # pathlib.Path.read_text() was introduced in Python 3.5. + with path.open('r', encoding='utf-8') as file: + text = file.read() + self.assertEqual('Hello, UTF-8 world!\n', text) + + +class PathDiskTests(PathTests, unittest.TestCase): + data = data01 + + def test_natural_path(self): + """ + Guarantee the internal implementation detail that + file-system-backed resources do not get the tempdir + treatment. 
+ """ + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + assert 'data' in str(path) + + +class PathMemoryTests(PathTests, unittest.TestCase): + def setUp(self): + file = io.BytesIO(b'Hello, UTF-8 world!\n') + self.addCleanup(file.close) + self.data = util.create_package( + file=file, path=FileNotFoundError("package exists only in memory") + ) + self.data.__spec__.origin = None + self.data.__spec__.has_location = False + + +class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): + def test_remove_in_context_manager(self): + # It is not an error if the file that was temporarily stashed on the + # file system is removed inside the `with` stanza. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + path.unlink() + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_read.py b/pkg_resources/_vendor/importlib_resources/tests/test_read.py new file mode 100644 index 00000000..41dd6db5 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_read.py @@ -0,0 +1,76 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . import util +from importlib import import_module + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_bytes() + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_text() + + +class ReadTests: + def test_read_bytes(self): + result = resources.files(self.data).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_text_default_encoding(self): + result = resources.files(self.data).joinpath('utf-8.file').read_text() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_read_text_given_encoding(self): + result = ( + resources.files(self.data) + .joinpath('utf-16.file') + .read_text(encoding='utf-16') + ) + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_read_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + self.assertRaises(UnicodeError, target.read_text, encoding='utf-8') + result = target.read_text(encoding='utf-8', errors='ignore') + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + +class ReadDiskTests(ReadTests, unittest.TestCase): + data = data01 + + +class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): + def test_read_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + result = resources.files(submodule).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_submodule_resource_by_name(self): + result = ( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .read_bytes() + ) + self.assertEqual(result, b'\0\1\2\3') + + +class ReadNamespaceTests(ReadTests, unittest.TestCase): + def setUp(self): + from . 
import namespacedata01 + + self.data = namespacedata01 + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py new file mode 100644 index 00000000..16841a50 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py @@ -0,0 +1,128 @@ +import os.path +import sys +import pathlib +import unittest + +from importlib import import_module +from importlib_resources.readers import MultiplexedPath, NamespaceReader + + +class MultiplexedPathTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + path = pathlib.Path(__file__).parent / 'namespacedata01' + cls.folder = str(path) + + def test_init_no_paths(self): + with self.assertRaises(FileNotFoundError): + MultiplexedPath() + + def test_init_file(self): + with self.assertRaises(NotADirectoryError): + MultiplexedPath(os.path.join(self.folder, 'binary.file')) + + def test_iterdir(self): + contents = {path.name for path in MultiplexedPath(self.folder).iterdir()} + try: + contents.remove('__pycache__') + except (KeyError, ValueError): + pass + self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'}) + + def test_iterdir_duplicate(self): + data01 = os.path.abspath(os.path.join(__file__, '..', 'data01')) + contents = { + path.name for path in MultiplexedPath(self.folder, data01).iterdir() + } + for remove in ('__pycache__', '__init__.pyc'): + try: + contents.remove(remove) + except (KeyError, ValueError): + pass + self.assertEqual( + contents, + {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'}, + ) + + def test_is_dir(self): + self.assertEqual(MultiplexedPath(self.folder).is_dir(), True) + + def test_is_file(self): + self.assertEqual(MultiplexedPath(self.folder).is_file(), False) + + def test_open_file(self): + path = MultiplexedPath(self.folder) + with self.assertRaises(FileNotFoundError): + path.read_bytes() + with self.assertRaises(FileNotFoundError): + path.read_text() + with self.assertRaises(FileNotFoundError): + path.open() + + def test_join_path(self): + prefix = os.path.abspath(os.path.join(__file__, '..')) + data01 = os.path.join(prefix, 'data01') + path = MultiplexedPath(self.folder, data01) + self.assertEqual( + str(path.joinpath('binary.file'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'binary.file'), + ) + self.assertEqual( + str(path.joinpath('subdirectory'))[len(prefix) + 1 :], + os.path.join('data01', 'subdirectory'), + ) + self.assertEqual( + str(path.joinpath('imaginary'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'imaginary'), + ) + + def test_repr(self): + self.assertEqual( + repr(MultiplexedPath(self.folder)), + f"MultiplexedPath('{self.folder}')", + ) + + def test_name(self): + self.assertEqual( + MultiplexedPath(self.folder).name, + os.path.basename(self.folder), + ) + + +class NamespaceReaderTest(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_init_error(self): + with self.assertRaises(ValueError): + NamespaceReader(['path1', 'path2']) + + def test_resource_path(self): + namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertEqual( + 
reader.resource_path('binary.file'), os.path.join(root, 'binary.file') + ) + self.assertEqual( + reader.resource_path('imaginary'), os.path.join(root, 'imaginary') + ) + + def test_files(self): + namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertIsInstance(reader.files(), MultiplexedPath) + self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')") + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py new file mode 100644 index 00000000..5affd8b0 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py @@ -0,0 +1,252 @@ +import sys +import unittest +import importlib_resources as resources +import uuid +import pathlib + +from . import data01 +from . import zipdata01, zipdata02 +from . import util +from importlib import import_module +from ._compat import import_helper, unlink + + +class ResourceTests: + # Subclasses are expected to set the `data` attribute. + + def test_is_file_exists(self): + target = resources.files(self.data) / 'binary.file' + self.assertTrue(target.is_file()) + + def test_is_file_missing(self): + target = resources.files(self.data) / 'not-a-file' + self.assertFalse(target.is_file()) + + def test_is_dir(self): + target = resources.files(self.data) / 'subdirectory' + self.assertFalse(target.is_file()) + self.assertTrue(target.is_dir()) + + +class ResourceDiskTests(ResourceTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): + pass + + +def names(traversable): + return {item.name for item in traversable.iterdir()} + + +class ResourceLoaderTests(unittest.TestCase): + def test_resource_contents(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) + + def test_is_file(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('B').is_file()) + + def test_is_dir(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('D').is_dir()) + + def test_resource_missing(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertFalse(resources.files(package).joinpath('Z').is_file()) + + +class ResourceCornerCaseTests(unittest.TestCase): + def test_package_has_no_reader_fallback(self): + # Test odd ball packages which: + # 1. Do not have a ResourceReader as a loader + # 2. Are not on the file system + # 3. Are not in a zip file + module = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + # Give the module a dummy loader. + module.__loader__ = object() + # Give the module a dummy origin. 
+ module.__file__ = '/path/which/shall/not/be/named' + module.__spec__.loader = module.__loader__ + module.__spec__.origin = module.__file__ + self.assertFalse(resources.files(module).joinpath('A').is_file()) + + +class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata01 # type: ignore + + def test_is_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .is_file() + ) + + def test_submodule_contents(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertEqual( + names(resources.files(submodule)), {'__init__.py', 'binary.file'} + ) + + def test_submodule_contents_by_name(self): + self.assertEqual( + names(resources.files('ziptestdata.subdirectory')), + {'__init__.py', 'binary.file'}, + ) + + +class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata02 # type: ignore + + def test_unrelated_contents(self): + """ + Test thata zip with two unrelated subpackages return + distinct resources. Ref python/importlib_resources#44. + """ + self.assertEqual( + names(resources.files('ziptestdata.one')), + {'__init__.py', 'resource1.txt'}, + ) + self.assertEqual( + names(resources.files('ziptestdata.two')), + {'__init__.py', 'resource2.txt'}, + ) + + +class DeletingZipsTest(unittest.TestCase): + """Having accessed resources in a zip file should not keep an open + reference to the zip. + """ + + ZIP_MODULE = zipdata01 + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + data_path = pathlib.Path(self.ZIP_MODULE.__file__) + data_dir = data_path.parent + self.source_zip_path = data_dir / 'ziptestdata.zip' + self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute() + self.zip_path.write_bytes(self.source_zip_path.read_bytes()) + sys.path.append(str(self.zip_path)) + self.data = import_module('ziptestdata') + + def tearDown(self): + try: + sys.path.remove(str(self.zip_path)) + except ValueError: + pass + + try: + del sys.path_importer_cache[str(self.zip_path)] + del sys.modules[self.data.__name__] + except KeyError: + pass + + try: + unlink(self.zip_path) + except OSError: + # If the test fails, this will probably fail too + pass + + def test_iterdir_does_not_keep_open(self): + c = [item.name for item in resources.files('ziptestdata').iterdir()] + self.zip_path.unlink() + del c + + def test_is_file_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').is_file() + self.zip_path.unlink() + del c + + def test_is_file_failure_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('not-present').is_file() + self.zip_path.unlink() + del c + + @unittest.skip("Desired but not supported.") + def test_as_file_does_not_keep_open(self): # pragma: no cover + c = resources.as_file(resources.files('ziptestdata') / 'binary.file') + self.zip_path.unlink() + del c + + def test_entered_path_does_not_keep_open(self): + # This is what certifi does on import to make its bundle + # available for the process duration. 
+ c = resources.as_file( + resources.files('ziptestdata') / 'binary.file' + ).__enter__() + self.zip_path.unlink() + del c + + def test_read_binary_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').read_bytes() + self.zip_path.unlink() + del c + + def test_read_text_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('utf-8.file').read_text() + self.zip_path.unlink() + del c + + +class ResourceFromNamespaceTest01(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_is_submodule_resource(self): + self.assertTrue( + resources.files(import_module('namespacedata01')) + .joinpath('binary.file') + .is_file() + ) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('namespacedata01').joinpath('binary.file').is_file() + ) + + def test_submodule_contents(self): + contents = names(resources.files(import_module('namespacedata01'))) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + def test_submodule_contents_by_name(self): + contents = names(resources.files('namespacedata01')) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py new file mode 100644 index 00000000..9ef0224c --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py @@ -0,0 +1,53 @@ +""" +Generate the zip test data files. + +Run to build the tests/zipdataNN/ziptestdata.zip files from +files in tests/dataNN. + +Replaces the file with the working copy, but does commit anything +to the source repo. +""" + +import contextlib +import os +import pathlib +import zipfile + + +def main(): + """ + >>> from unittest import mock + >>> monkeypatch = getfixture('monkeypatch') + >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock()) + >>> print(); main() # print workaround for bpo-32509 + <BLANKLINE> + ...data01... -> ziptestdata/... + ... + ...data02... -> ziptestdata/... + ... + """ + suffixes = '01', '02' + tuple(map(generate, suffixes)) + + +def generate(suffix): + root = pathlib.Path(__file__).parent.relative_to(os.getcwd()) + zfpath = root / f'zipdata{suffix}/ziptestdata.zip' + with zipfile.ZipFile(zfpath, 'w') as zf: + for src, rel in walk(root / f'data{suffix}'): + dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix()) + print(src, '->', dst) + zf.write(src, dst) + + +def walk(datapath): + for dirpath, dirnames, filenames in os.walk(datapath): + with contextlib.suppress(KeyError): + dirnames.remove('__pycache__') + for filename in filenames: + res = pathlib.Path(dirpath) / filename + rel = res.relative_to(datapath) + yield res, rel + + +__name__ == '__main__' and main() diff --git a/pkg_resources/_vendor/importlib_resources/tests/util.py b/pkg_resources/_vendor/importlib_resources/tests/util.py new file mode 100644 index 00000000..c6d83e4b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/util.py @@ -0,0 +1,178 @@ +import abc +import importlib +import io +import sys +import types +from pathlib import Path, PurePath + +from . import data01 +from . 
import zipdata01 +from ..abc import ResourceReader +from ._compat import import_helper + + +from importlib.machinery import ModuleSpec + + +class Reader(ResourceReader): + def __init__(self, **kwargs): + vars(self).update(kwargs) + + def get_resource_reader(self, package): + return self + + def open_resource(self, path): + self._path = path + if isinstance(self.file, Exception): + raise self.file + return self.file + + def resource_path(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + return self.path + + def is_resource(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + + def part(entry): + return entry.split('/') + + return any( + len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents) + ) + + def contents(self): + if isinstance(self.path, Exception): + raise self.path + yield from self._contents + + +def create_package_from_loader(loader, is_package=True): + name = 'testingpackage' + module = types.ModuleType(name) + spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package) + module.__spec__ = spec + module.__loader__ = loader + return module + + +def create_package(file=None, path=None, is_package=True, contents=()): + return create_package_from_loader( + Reader(file=file, path=path, _contents=contents), + is_package, + ) + + +class CommonTests(metaclass=abc.ABCMeta): + """ + Tests shared by test_open, test_path, and test_read. + """ + + @abc.abstractmethod + def execute(self, package, path): + """ + Call the pertinent legacy API function (e.g. open_text, path) + on package and path. + """ + + def test_package_name(self): + # Passing in the package name should succeed. + self.execute(data01.__name__, 'utf-8.file') + + def test_package_object(self): + # Passing in the package itself should succeed. + self.execute(data01, 'utf-8.file') + + def test_string_path(self): + # Passing in a string for the path should succeed. + path = 'utf-8.file' + self.execute(data01, path) + + def test_pathlib_path(self): + # Passing in a pathlib.PurePath object for the path should succeed. + path = PurePath('utf-8.file') + self.execute(data01, path) + + def test_importing_module_as_side_effect(self): + # The anchor package can already be imported. + del sys.modules[data01.__name__] + self.execute(data01.__name__, 'utf-8.file') + + def test_non_package_by_name(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + self.execute(__name__, 'utf-8.file') + + def test_non_package_by_package(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + module = sys.modules['importlib_resources.tests.util'] + self.execute(module, 'utf-8.file') + + def test_missing_path(self): + # Attempting to open or read or request the path for a + # non-existent path should succeed if open_resource + # can return a viable data stream. + bytes_data = io.BytesIO(b'Hello, world!') + package = create_package(file=bytes_data, path=FileNotFoundError()) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_extant_path(self): + # Attempting to open or read or request the path when the + # path does exist should still succeed. Does not assert + # anything about the result. 
+ bytes_data = io.BytesIO(b'Hello, world!') + # any path that exists + path = __file__ + package = create_package(file=bytes_data, path=path) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_useless_loader(self): + package = create_package(file=FileNotFoundError(), path=FileNotFoundError()) + with self.assertRaises(FileNotFoundError): + self.execute(package, 'utf-8.file') + + +class ZipSetupBase: + ZIP_MODULE = None + + @classmethod + def setUpClass(cls): + data_path = Path(cls.ZIP_MODULE.__file__) + data_dir = data_path.parent + cls._zip_path = str(data_dir / 'ziptestdata.zip') + sys.path.append(cls._zip_path) + cls.data = importlib.import_module('ziptestdata') + + @classmethod + def tearDownClass(cls): + try: + sys.path.remove(cls._zip_path) + except ValueError: + pass + + try: + del sys.path_importer_cache[cls._zip_path] + del sys.modules[cls.data.__name__] + except KeyError: + pass + + try: + del cls.data + del cls._zip_path + except AttributeError: + pass + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + +class ZipSetup(ZipSetupBase): + ZIP_MODULE = zipdata01 # type: ignore diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip Binary files differnew file mode 100644 index 00000000..9a3bb073 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip Binary files differnew file mode 100644 index 00000000..d63ff512 --- /dev/null +++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. 
Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA new file mode 100644 index 00000000..908711b7 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/METADATA @@ -0,0 +1,52 @@ +Metadata-Version: 2.1 +Name: jaraco.context +Version: 4.1.1 +Summary: Context managers by jaraco +Home-page: https://github.com/jaraco/jaraco.context +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.context.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.context + +.. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest + :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest + +.. 
image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD new file mode 100644 index 00000000..f40d48c7 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.context-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.context-4.1.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.context-4.1.1.dist-info/METADATA,sha256=bvqDGCk6Z7TkohUqr5XZm19SbF9mVxrtXjN6uF_BAMQ,2031
+jaraco.context-4.1.1.dist-info/RECORD,,
+jaraco.context-4.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.context-4.1.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/context.cpython-310.pyc,,
+jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420
diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA new file mode 100644 index 00000000..12dfbdd0 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.1 +Name: jaraco.functools +Version: 3.5.0 +Summary: Functools like those found in stdlib +Home-page: https://github.com/jaraco/jaraco.functools +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: more-itertools +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.classes ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg + +.. image:: https://img.shields.io/travis/jaraco/jaraco.functools/master.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.functools + +.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest + :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + +Additional functools in the spirit of stdlib's functools. + + diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD new file mode 100644 index 00000000..fbda3d1f --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.functools-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.functools-3.5.0.dist-info/METADATA,sha256=cE9C7u9bo_GjLAuw4nML67a25kUaPDiHn4j03lG4jd0,2276
+jaraco.functools-3.5.0.dist-info/RECORD,,
+jaraco.functools-3.5.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.functools-3.5.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/functools.cpython-310.pyc,,
+jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494
diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA new file mode 100644 index 00000000..615a50a4 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA @@ -0,0 +1,55 @@ +Metadata-Version: 2.1 +Name: jaraco.text +Version: 3.7.0 +Summary: Module for text manipulation +Home-page: https://github.com/jaraco/jaraco.text +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: jaraco.functools +Requires-Dist: jaraco.context (>=4.1) +Requires-Dist: importlib-resources ; python_version < "3.9" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.text.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.text + +.. image:: https://github.com/jaraco/jaraco.text/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest + :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD new file mode 100644 index 00000000..916ad7d3 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +jaraco.text-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.text-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.text-3.7.0.dist-info/METADATA,sha256=5mcR1dY0cJNrM-VIkAFkpjOgvgzmq6nM1GfD0gwTIhs,2136
+jaraco.text-3.7.0.dist-info/RECORD,,
+jaraco.text-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
+jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
+jaraco/text/__pycache__/__init__.cpython-310.pyc,,
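
With the jaraco.text RECORD in place, here is a minimal sketch (again assuming the pkg_resources.extern vendored namespace, not code taken from the diff) of the line-handling helpers added by this file: yield_lines, join_continuation and drop_comment, whose doctests appear in the vendored __init__.py further down. The input block and the package names in it are illustrative only.

    # Illustrative sketch; helper names come from the vendored jaraco.text module.
    from pkg_resources.extern.jaraco.text import (
        yield_lines,        # drop blank lines and "#"-prefixed comment lines
        join_continuation,  # fold a line ending in a backslash into the next one
        drop_comment,       # strip a trailing " # ..." remark
    )

    raw = """
    # a comment line
    requests \\
    >=2.0
    setuptools # pinned elsewhere
    """
    cleaned = [drop_comment(line) for line in join_continuation(yield_lines(raw))]
    assert cleaned == ["requests>=2.0", "setuptools"]
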
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/pkg_resources/_vendor/jaraco/__init__.py b/pkg_resources/_vendor/jaraco/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/jaraco/__init__.py diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py new file mode 100644 index 00000000..87a4e3dc --- /dev/null +++ b/pkg_resources/_vendor/jaraco/context.py @@ -0,0 +1,213 @@ +import os +import subprocess +import contextlib +import functools +import tempfile +import shutil +import operator + + +@contextlib.contextmanager +def pushd(dir): + orig = os.getcwd() + os.chdir(dir) + try: + yield dir + finally: + os.chdir(orig) + + +@contextlib.contextmanager +def tarball_context(url, target_dir=None, runner=None, pushd=pushd): + """ + Get a tarball, extract it, change to that directory, yield, then + clean up. + `runner` is the function to invoke commands. + `pushd` is a context manager for changing the directory. + """ + if target_dir is None: + target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') + if runner is None: + runner = functools.partial(subprocess.check_call, shell=True) + # In the tar command, use --strip-components=1 to strip the first path and + # then + # use -C to cause the files to be extracted to {target_dir}. This ensures + # that we always know where the files were extracted. + runner('mkdir {target_dir}'.format(**vars())) + try: + getter = 'wget {url} -O -' + extract = 'tar x{compression} --strip-components=1 -C {target_dir}' + cmd = ' | '.join((getter, extract)) + runner(cmd.format(compression=infer_compression(url), **vars())) + with pushd(target_dir): + yield target_dir + finally: + runner('rm -Rf {target_dir}'.format(**vars())) + + +def infer_compression(url): + """ + Given a URL or filename, infer the compression code for tar. + """ + # cheat and just assume it's the last two characters + compression_indicator = url[-2:] + mapping = dict(gz='z', bz='j', xz='J') + # Assume 'z' (gzip) if no match + return mapping.get(compression_indicator, 'z') + + +@contextlib.contextmanager +def temp_dir(remover=shutil.rmtree): + """ + Create a temporary directory context. Pass a custom remover + to override the removal behavior. + """ + temp_dir = tempfile.mkdtemp() + try: + yield temp_dir + finally: + remover(temp_dir) + + +@contextlib.contextmanager +def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): + """ + Check out the repo indicated by url. + + If dest_ctx is supplied, it should be a context manager + to yield the target directory for the check out. 
+ """ + exe = 'git' if 'git' in url else 'hg' + with dest_ctx() as repo_dir: + cmd = [exe, 'clone', url, repo_dir] + if branch: + cmd.extend(['--branch', branch]) + devnull = open(os.path.devnull, 'w') + stdout = devnull if quiet else None + subprocess.check_call(cmd, stdout=stdout) + yield repo_dir + + +@contextlib.contextmanager +def null(): + yield + + +class ExceptionTrap: + """ + A context manager that will catch certain exceptions and provide an + indication they occurred. + + >>> with ExceptionTrap() as trap: + ... raise Exception() + >>> bool(trap) + True + + >>> with ExceptionTrap() as trap: + ... pass + >>> bool(trap) + False + + >>> with ExceptionTrap(ValueError) as trap: + ... raise ValueError("1 + 1 is not 3") + >>> bool(trap) + True + + >>> with ExceptionTrap(ValueError) as trap: + ... raise Exception() + Traceback (most recent call last): + ... + Exception + + >>> bool(trap) + False + """ + + exc_info = None, None, None + + def __init__(self, exceptions=(Exception,)): + self.exceptions = exceptions + + def __enter__(self): + return self + + @property + def type(self): + return self.exc_info[0] + + @property + def value(self): + return self.exc_info[1] + + @property + def tb(self): + return self.exc_info[2] + + def __exit__(self, *exc_info): + type = exc_info[0] + matches = type and issubclass(type, self.exceptions) + if matches: + self.exc_info = exc_info + return matches + + def __bool__(self): + return bool(self.type) + + def raises(self, func, *, _test=bool): + """ + Wrap func and replace the result with the truth + value of the trap (True if an exception occurred). + + First, give the decorator an alias to support Python 3.8 + Syntax. + + >>> raises = ExceptionTrap(ValueError).raises + + Now decorate a function that always fails. + + >>> @raises + ... def fail(): + ... raise ValueError('failed') + >>> fail() + True + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with ExceptionTrap(self.exceptions) as trap: + func(*args, **kwargs) + return _test(trap) + + return wrapper + + def passes(self, func): + """ + Wrap func and replace the result with the truth + value of the trap (True if no exception). + + First, give the decorator an alias to support Python 3.8 + Syntax. + + >>> passes = ExceptionTrap(ValueError).passes + + Now decorate a function that always fails. + + >>> @passes + ... def fail(): + ... raise ValueError('failed') + + >>> fail() + False + """ + return self.raises(func, _test=operator.not_) + + +class suppress(contextlib.suppress, contextlib.ContextDecorator): + """ + A version of contextlib.suppress with decorator support. + + >>> @suppress(KeyError) + ... def key_error(): + ... {}[''] + >>> key_error() + """ diff --git a/pkg_resources/_vendor/jaraco/functools.py b/pkg_resources/_vendor/jaraco/functools.py new file mode 100644 index 00000000..a3fea3a1 --- /dev/null +++ b/pkg_resources/_vendor/jaraco/functools.py @@ -0,0 +1,525 @@ +import functools +import time +import inspect +import collections +import types +import itertools + +import pkg_resources.extern.more_itertools + +from typing import Callable, TypeVar + + +CallableT = TypeVar("CallableT", bound=Callable[..., object]) + + +def compose(*funcs): + """ + Compose any number of unary functions into a single unary function. 
+ + >>> import textwrap + >>> expected = str.strip(textwrap.dedent(compose.__doc__)) + >>> strip_and_dedent = compose(str.strip, textwrap.dedent) + >>> strip_and_dedent(compose.__doc__) == expected + True + + Compose also allows the innermost function to take arbitrary arguments. + + >>> round_three = lambda x: round(x, ndigits=3) + >>> f = compose(round_three, int.__truediv__) + >>> [f(3*x, x+1) for x in range(1,10)] + [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7] + """ + + def compose_two(f1, f2): + return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + return functools.reduce(compose_two, funcs) + + +def method_caller(method_name, *args, **kwargs): + """ + Return a function that will call a named method on the + target object with optional positional and keyword + arguments. + + >>> lower = method_caller('lower') + >>> lower('MyString') + 'mystring' + """ + + def call_method(target): + func = getattr(target, method_name) + return func(*args, **kwargs) + + return call_method + + +def once(func): + """ + Decorate func so it's only ever called the first time. + + This decorator can ensure that an expensive or non-idempotent function + will not be expensive on subsequent calls and is idempotent. + + >>> add_three = once(lambda a: a+3) + >>> add_three(3) + 6 + >>> add_three(9) + 6 + >>> add_three('12') + 6 + + To reset the stored value, simply clear the property ``saved_result``. + + >>> del add_three.saved_result + >>> add_three(9) + 12 + >>> add_three(8) + 12 + + Or invoke 'reset()' on it. + + >>> add_three.reset() + >>> add_three(-3) + 0 + >>> add_three(0) + 0 + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(wrapper, 'saved_result'): + wrapper.saved_result = func(*args, **kwargs) + return wrapper.saved_result + + wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result') + return wrapper + + +def method_cache( + method: CallableT, + cache_wrapper: Callable[ + [CallableT], CallableT + ] = functools.lru_cache(), # type: ignore[assignment] +) -> CallableT: + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. 
+ + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + + def wrapper(self: object, *args: object, **kwargs: object) -> object: + # it's the first call, replace the method with a cached, bound method + bound_method: CallableT = types.MethodType( # type: ignore[assignment] + method, self + ) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. + wrapper.cache_clear = lambda: None # type: ignore[attr-defined] + + return ( # type: ignore[return-value] + _special_method_cache(method, cache_wrapper) or wrapper + ) + + +def _special_method_cache(method, cache_wrapper): + """ + Because Python treats special methods differently, it's not + possible to use instance attributes to implement the cached + methods. + + Instead, install the wrapper method under a different name + and return a simple proxy to that wrapper. + + https://github.com/jaraco/jaraco.functools/issues/5 + """ + name = method.__name__ + special_names = '__getattr__', '__getitem__' + if name not in special_names: + return + + wrapper_name = '__cached' + name + + def proxy(self, *args, **kwargs): + if wrapper_name not in vars(self): + bound = types.MethodType(method, self) + cache = cache_wrapper(bound) + setattr(self, wrapper_name, cache) + else: + cache = getattr(self, wrapper_name) + return cache(*args, **kwargs) + + return proxy + + +def apply(transform): + """ + Decorate a function with a transform function that is + invoked on results returned from the decorated function. + + >>> @apply(reversed) + ... def get_numbers(start): + ... "doc for get_numbers" + ... return range(start, start+3) + >>> list(get_numbers(4)) + [6, 5, 4] + >>> get_numbers.__doc__ + 'doc for get_numbers' + """ + + def wrap(func): + return functools.wraps(func)(compose(transform, func)) + + return wrap + + +def result_invoke(action): + r""" + Decorate a function with an action function that is + invoked on the results returned from the decorated + function (for its side-effect), then return the original + result. + + >>> @result_invoke(print) + ... def add_two(a, b): + ... return a + b + >>> x = add_two(2, 3) + 5 + >>> x + 5 + """ + + def wrap(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + action(result) + return result + + return wrapper + + return wrap + + +def call_aside(f, *args, **kwargs): + """ + Call a function for its side effect after initialization. + + >>> @call_aside + ... def func(): print("called") + called + >>> func() + called + + Use functools.partial to pass parameters to the initial call + + >>> @functools.partial(call_aside, name='bingo') + ... 
def func(name): print("called with", name) + called with bingo + """ + f(*args, **kwargs) + return f + + +class Throttler: + """ + Rate-limit a function (or other callable) + """ + + def __init__(self, func, max_rate=float('Inf')): + if isinstance(func, Throttler): + func = func.func + self.func = func + self.max_rate = max_rate + self.reset() + + def reset(self): + self.last_called = 0 + + def __call__(self, *args, **kwargs): + self._wait() + return self.func(*args, **kwargs) + + def _wait(self): + "ensure at least 1/max_rate seconds from last call" + elapsed = time.time() - self.last_called + must_wait = 1 / self.max_rate - elapsed + time.sleep(max(0, must_wait)) + self.last_called = time.time() + + def __get__(self, obj, type=None): + return first_invoke(self._wait, functools.partial(self.func, obj)) + + +def first_invoke(func1, func2): + """ + Return a function that when invoked will invoke func1 without + any parameters (for its side-effect) and then invoke func2 + with whatever parameters were passed, returning its result. + """ + + def wrapper(*args, **kwargs): + func1() + return func2(*args, **kwargs) + + return wrapper + + +def retry_call(func, cleanup=lambda: None, retries=0, trap=()): + """ + Given a callable func, trap the indicated exceptions + for up to 'retries' times, invoking cleanup on the + exception. On the final attempt, allow any exceptions + to propagate. + """ + attempts = itertools.count() if retries == float('inf') else range(retries) + for attempt in attempts: + try: + return func() + except trap: + cleanup() + + return func() + + +def retry(*r_args, **r_kwargs): + """ + Decorator wrapper for retry_call. Accepts arguments to retry_call + except func and then returns a decorator for the decorated function. + + Ex: + + >>> @retry(retries=3) + ... def my_func(a, b): + ... "this is my funk" + ... print(a, b) + >>> my_func.__doc__ + 'this is my funk' + """ + + def decorate(func): + @functools.wraps(func) + def wrapper(*f_args, **f_kwargs): + bound = functools.partial(func, *f_args, **f_kwargs) + return retry_call(bound, *r_args, **r_kwargs) + + return wrapper + + return decorate + + +def print_yielded(func): + """ + Convert a generator into a function that prints all yielded elements + + >>> @print_yielded + ... def x(): + ... yield 3; yield None + >>> x() + 3 + None + """ + print_all = functools.partial(map, print) + print_results = compose(more_itertools.consume, print_all, func) + return functools.wraps(func)(print_results) + + +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper + + +def assign_params(func, namespace): + """ + Assign parameters from namespace where func solicits. + + >>> def func(x, y=3): + ... print(x, y) + >>> assigned = assign_params(func, dict(x=2, z=4)) + >>> assigned() + 2 3 + + The usual errors are raised if a function doesn't receive + its required parameters: + + >>> assigned = assign_params(func, dict(y=3, z=4)) + >>> assigned() + Traceback (most recent call last): + TypeError: func() ...argument... + + It even works on methods: + + >>> class Handler: + ... def meth(self, arg): + ... 
print(arg) + >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))() + crystal + """ + sig = inspect.signature(func) + params = sig.parameters.keys() + call_ns = {k: namespace[k] for k in params if k in namespace} + return functools.partial(func, **call_ns) + + +def save_method_args(method): + """ + Wrap a method such that when it is called, the args and kwargs are + saved on the method. + + >>> class MyClass: + ... @save_method_args + ... def method(self, a, b): + ... print(a, b) + >>> my_ob = MyClass() + >>> my_ob.method(1, 2) + 1 2 + >>> my_ob._saved_method.args + (1, 2) + >>> my_ob._saved_method.kwargs + {} + >>> my_ob.method(a=3, b='foo') + 3 foo + >>> my_ob._saved_method.args + () + >>> my_ob._saved_method.kwargs == dict(a=3, b='foo') + True + + The arguments are stored on the instance, allowing for + different instance to save different args. + + >>> your_ob = MyClass() + >>> your_ob.method({str('x'): 3}, b=[4]) + {'x': 3} [4] + >>> your_ob._saved_method.args + ({'x': 3},) + >>> my_ob._saved_method.args + () + """ + args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') + + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + attr_name = '_saved_' + method.__name__ + attr = args_and_kwargs(args, kwargs) + setattr(self, attr_name, attr) + return method(self, *args, **kwargs) + + return wrapper + + +def except_(*exceptions, replace=None, use=None): + """ + Replace the indicated exceptions, if raised, with the indicated + literal replacement or evaluated expression (if present). + + >>> safe_int = except_(ValueError)(int) + >>> safe_int('five') + >>> safe_int('5') + 5 + + Specify a literal replacement with ``replace``. + + >>> safe_int_r = except_(ValueError, replace=0)(int) + >>> safe_int_r('five') + 0 + + Provide an expression to ``use`` to pass through particular parameters. + + >>> safe_int_pt = except_(ValueError, use='args[0]')(int) + >>> safe_int_pt('five') + 'five' + + """ + + def decorate(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except exceptions: + try: + return eval(use) + except TypeError: + return replace + + return wrapper + + return decorate diff --git a/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt b/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt new file mode 100644 index 00000000..986f944b --- /dev/null +++ b/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt @@ -0,0 +1,2 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. +Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. 
Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst. diff --git a/pkg_resources/_vendor/jaraco/text/__init__.py b/pkg_resources/_vendor/jaraco/text/__init__.py new file mode 100644 index 00000000..c466378c --- /dev/null +++ b/pkg_resources/_vendor/jaraco/text/__init__.py @@ -0,0 +1,599 @@ +import re +import itertools +import textwrap +import functools + +try: + from importlib.resources import files # type: ignore +except ImportError: # pragma: nocover + from pkg_resources.extern.importlib_resources import files # type: ignore + +from pkg_resources.extern.jaraco.functools import compose, method_cache +from pkg_resources.extern.jaraco.context import ExceptionTrap + + +def substitution(old, new): + """ + Return a function that will perform a substitution on a string + """ + return lambda s: s.replace(old, new) + + +def multi_substitution(*substitutions): + """ + Take a sequence of pairs specifying substitutions, and create + a function that performs those substitutions. + + >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') + 'baz' + """ + substitutions = itertools.starmap(substitution, substitutions) + # compose function applies last function first, so reverse the + # substitutions to get the expected order. + substitutions = reversed(tuple(substitutions)) + return compose(*substitutions) + + +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. + + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use ``in_``: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. 
+ @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) + + +# Python 3.8 compatibility +_unicode_trap = ExceptionTrap(UnicodeDecodeError) + + +@_unicode_trap.passes +def is_decodable(value): + r""" + Return True if the supplied value is decodable (using the default + encoding). + + >>> is_decodable(b'\xff') + False + >>> is_decodable(b'\x32') + True + """ + value.decode() + + +def is_binary(value): + r""" + Return True if the value appears to be binary (that is, it's a byte + string and isn't decodable). + + >>> is_binary(b'\xff') + True + >>> is_binary('\xff') + False + """ + return isinstance(value, bytes) and not is_decodable(value) + + +def trim(s): + r""" + Trim something like a docstring to remove the whitespace that + is common due to indentation and formatting. + + >>> trim("\n\tfoo = bar\n\t\tbar = baz\n") + 'foo = bar\n\tbar = baz' + """ + return textwrap.dedent(s).strip() + + +def wrap(s): + """ + Wrap lines of text, retaining existing newlines as + paragraph markers. + + >>> print(wrap(lorem_ipsum)) + Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do + eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad + minim veniam, quis nostrud exercitation ullamco laboris nisi ut + aliquip ex ea commodo consequat. Duis aute irure dolor in + reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla + pariatur. Excepteur sint occaecat cupidatat non proident, sunt in + culpa qui officia deserunt mollit anim id est laborum. + <BLANKLINE> + Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam + varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus + magna felis sollicitudin mauris. Integer in mauris eu nibh euismod + gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis + risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, + eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas + fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla + a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, + neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing + sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque + nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus + quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, + molestie eu, feugiat in, orci. In hac habitasse platea dictumst. + """ + paragraphs = s.splitlines() + wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs) + return '\n\n'.join(wrapped) + + +def unwrap(s): + r""" + Given a multi-line string, return an unwrapped version. + + >>> wrapped = wrap(lorem_ipsum) + >>> wrapped.count('\n') + 20 + >>> unwrapped = unwrap(wrapped) + >>> unwrapped.count('\n') + 1 + >>> print(unwrapped) + Lorem ipsum dolor sit amet, consectetur adipiscing ... + Curabitur pretium tincidunt lacus. Nulla gravida orci ... 
+ + """ + paragraphs = re.split(r'\n\n+', s) + cleaned = (para.replace('\n', ' ') for para in paragraphs) + return '\n'.join(cleaned) + + + + +class Splitter(object): + """object that will split a string with the given arguments for each call + + >>> s = Splitter(',') + >>> s('hello, world, this is your, master calling') + ['hello', ' world', ' this is your', ' master calling'] + """ + + def __init__(self, *args): + self.args = args + + def __call__(self, s): + return s.split(*self.args) + + +def indent(string, prefix=' ' * 4): + """ + >>> indent('foo') + ' foo' + """ + return prefix + string + + +class WordSet(tuple): + """ + Given an identifier, return the words that identifier represents, + whether in camel case, underscore-separated, etc. + + >>> WordSet.parse("camelCase") + ('camel', 'Case') + + >>> WordSet.parse("under_sep") + ('under', 'sep') + + Acronyms should be retained + + >>> WordSet.parse("firstSNL") + ('first', 'SNL') + + >>> WordSet.parse("you_and_I") + ('you', 'and', 'I') + + >>> WordSet.parse("A simple test") + ('A', 'simple', 'test') + + Multiple caps should not interfere with the first cap of another word. + + >>> WordSet.parse("myABCClass") + ('my', 'ABC', 'Class') + + The result is a WordSet, so you can get the form you need. + + >>> WordSet.parse("myABCClass").underscore_separated() + 'my_ABC_Class' + + >>> WordSet.parse('a-command').camel_case() + 'ACommand' + + >>> WordSet.parse('someIdentifier').lowered().space_separated() + 'some identifier' + + Slices of the result should return another WordSet. + + >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated() + 'out_of_context' + + >>> WordSet.from_class_name(WordSet()).lowered().space_separated() + 'word set' + + >>> example = WordSet.parse('figured it out') + >>> example.headless_camel_case() + 'figuredItOut' + >>> example.dash_separated() + 'figured-it-out' + + """ + + _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))') + + def capitalized(self): + return WordSet(word.capitalize() for word in self) + + def lowered(self): + return WordSet(word.lower() for word in self) + + def camel_case(self): + return ''.join(self.capitalized()) + + def headless_camel_case(self): + words = iter(self) + first = next(words).lower() + new_words = itertools.chain((first,), WordSet(words).camel_case()) + return ''.join(new_words) + + def underscore_separated(self): + return '_'.join(self) + + def dash_separated(self): + return '-'.join(self) + + def space_separated(self): + return ' '.join(self) + + def trim_right(self, item): + """ + Remove the item from the end of the set. + + >>> WordSet.parse('foo bar').trim_right('foo') + ('foo', 'bar') + >>> WordSet.parse('foo bar').trim_right('bar') + ('foo',) + >>> WordSet.parse('').trim_right('bar') + () + """ + return self[:-1] if self and self[-1] == item else self + + def trim_left(self, item): + """ + Remove the item from the beginning of the set. 
+ + >>> WordSet.parse('foo bar').trim_left('foo') + ('bar',) + >>> WordSet.parse('foo bar').trim_left('bar') + ('foo', 'bar') + >>> WordSet.parse('').trim_left('bar') + () + """ + return self[1:] if self and self[0] == item else self + + def trim(self, item): + """ + >>> WordSet.parse('foo bar').trim('foo') + ('bar',) + """ + return self.trim_left(item).trim_right(item) + + def __getitem__(self, item): + result = super(WordSet, self).__getitem__(item) + if isinstance(item, slice): + result = WordSet(result) + return result + + @classmethod + def parse(cls, identifier): + matches = cls._pattern.finditer(identifier) + return WordSet(match.group(0) for match in matches) + + @classmethod + def from_class_name(cls, subject): + return cls.parse(subject.__class__.__name__) + + +# for backward compatibility +words = WordSet.parse + + +def simple_html_strip(s): + r""" + Remove HTML from the string `s`. + + >>> str(simple_html_strip('')) + '' + + >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) + A stormy day in paradise + + >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) + Somebody tell the truth. + + >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) + What about + multiple lines? + """ + html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL) + texts = (match.group(3) or '' for match in html_stripper.finditer(s)) + return ''.join(texts) + + +class SeparatedValues(str): + """ + A string separated by a separator. Overrides __iter__ for getting + the values. + + >>> list(SeparatedValues('a,b,c')) + ['a', 'b', 'c'] + + Whitespace is stripped and empty values are discarded. + + >>> list(SeparatedValues(' a, b , c, ')) + ['a', 'b', 'c'] + """ + + separator = ',' + + def __iter__(self): + parts = self.split(self.separator) + return filter(None, (part.strip() for part in parts)) + + +class Stripper: + r""" + Given a series of lines, find the common prefix and strip it from them. + + >>> lines = [ + ... 'abcdefg\n', + ... 'abc\n', + ... 'abcde\n', + ... ] + >>> res = Stripper.strip_prefix(lines) + >>> res.prefix + 'abc' + >>> list(res.lines) + ['defg\n', '\n', 'de\n'] + + If no prefix is common, nothing should be stripped. + + >>> lines = [ + ... 'abcd\n', + ... '1234\n', + ... ] + >>> res = Stripper.strip_prefix(lines) + >>> res.prefix = '' + >>> list(res.lines) + ['abcd\n', '1234\n'] + """ + + def __init__(self, prefix, lines): + self.prefix = prefix + self.lines = map(self, lines) + + @classmethod + def strip_prefix(cls, lines): + prefix_lines, lines = itertools.tee(lines) + prefix = functools.reduce(cls.common_prefix, prefix_lines) + return cls(prefix, lines) + + def __call__(self, line): + if not self.prefix: + return line + null, prefix, rest = line.partition(self.prefix) + return rest + + @staticmethod + def common_prefix(s1, s2): + """ + Return the common prefix of two lines. + """ + index = min(len(s1), len(s2)) + while s1[:index] != s2[:index]: + index -= 1 + return s1[:index] + + +def remove_prefix(text, prefix): + """ + Remove the prefix from the text if it exists. + + >>> remove_prefix('underwhelming performance', 'underwhelming ') + 'performance' + + >>> remove_prefix('something special', 'sample') + 'something special' + """ + null, prefix, rest = text.rpartition(prefix) + return rest + + +def remove_suffix(text, suffix): + """ + Remove the suffix from the text if it exists. 
+ + >>> remove_suffix('name.git', '.git') + 'name' + + >>> remove_suffix('something special', 'sample') + 'something special' + """ + rest, suffix, null = text.partition(suffix) + return rest + + +def normalize_newlines(text): + r""" + Replace alternate newlines with the canonical newline. + + >>> normalize_newlines('Lorem Ipsum\u2029') + 'Lorem Ipsum\n' + >>> normalize_newlines('Lorem Ipsum\r\n') + 'Lorem Ipsum\n' + >>> normalize_newlines('Lorem Ipsum\x85') + 'Lorem Ipsum\n' + """ + newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029'] + pattern = '|'.join(newlines) + return re.sub(pattern, '\n', text) + + +def _nonblank(str): + return str and not str.startswith('#') + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. + + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(' #')[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. + + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) + ['foobarbaz'] + + Not sure why, but... + The character preceeding the backslash is also elided. + + >>> list(join_continuation(['goo\\', 'dly'])) + ['godly'] + + A terrible idea, but... + If no line is available to continue, suppress the lines. + + >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) + ['foo'] + """ + lines = iter(lines) + for item in lines: + while item.endswith('\\'): + try: + item = item[:-2].strip() + next(lines) + except StopIteration: + return + yield item diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/INSTALLER b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/LICENSE b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/LICENSE new file mode 100644 index 00000000..0a523bec --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012 Erik Rose + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA new file mode 100644 index 00000000..9efacdd7 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/METADATA @@ -0,0 +1,521 @@ +Metadata-Version: 2.1 +Name: more-itertools +Version: 8.12.0 +Summary: More routines for operating on iterables, beyond itertools +Home-page: https://github.com/more-itertools/more-itertools +Author: Erik Rose +Author-email: erikrose@grinchcentral.com +License: MIT +Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst +License-File: LICENSE + +============== +More Itertools +============== + +.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest + :target: https://more-itertools.readthedocs.io/en/stable/ + +Python's ``itertools`` library is a gem - you can compose elegant solutions +for a variety of problems with the functions it provides. In ``more-itertools`` +we collect additional building blocks, recipes, and routines for working with +Python iterables. 
+ ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, | +| | `ichunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ichunked>`_, | +| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, | +| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, | +| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, | +| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, | +| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, | +| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, | +| | `split_into <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_into>`_, | +| | `split_when <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_when>`_, | +| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, | +| | `unzip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unzip>`_, | +| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, | +| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, | +| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, | +| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, | +| | `substrings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings>`_, | +| | `substrings_indexes <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings_indexes>`_, | +| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, | +| | `windowed_complete <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed_complete>`_, | +| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_, | +| | `triplewise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.triplewise>`_, | +| | `sliding_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliding_window>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Augmenting | `count_cycle 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, | +| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, | +| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, | +| | `mark_ends <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.mark_ends>`_, | +| | `repeat_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeat_last>`_, | +| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, | +| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, | +| | `pad_none <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pad_none>`_, | +| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, | +| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, | +| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, | +| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, | +| | `interleave_evenly <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_evenly>`_, | +| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, | +| | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, | +| | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, | +| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | +| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | +| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, | +| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, | +| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, | +| | `value_chain <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.value_chain>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, | +| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, | +| | `sample <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sample>`_, | +| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, | +| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, | +| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, | +| | `exactly_n 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, | +| | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, | +| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, | +| | `all_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_unique>`_, | +| | `minmax <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.minmax>`_, | +| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, | +| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, | +| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, | +| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, | +| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, | +| | `only <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.only>`_, | +| | `strictly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strictly_n>`_, | +| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, | +| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, | +| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, | +| | `filter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.filter_except>`_, | +| | `map_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_except>`_, | +| | `nth_or_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_or_last>`_, | +| | `unique_in_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_in_window>`_, | +| | `before_and_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.before_and_after>`_, | +| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, | +| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, | +| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, | +| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertoo ls.unique_everseen>`_, | +| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, | +| | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, | +| | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, | +| | `distinct_combinations 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, | +| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, | +| | `partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partitions>`_, | +| | `set_partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.set_partitions>`_, | +| | `product_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.product_index>`_, | +| | `combination_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.combination_index>`_, | +| | `permutation_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.permutation_index>`_, | +| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, | +| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, | +| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, | +| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, | +| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, | +| | `nth_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_product>`_, | +| | `nth_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_permutation>`_, | +| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, | +| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, | +| | `countable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.countable>`_, | +| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, | +| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, | +| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Others | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, | +| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, | +| | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, | +| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, | +| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, | +| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, | +| | `difference 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, | +| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, | +| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, | +| | `time_limited <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.time_limited>`_, | +| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, | +| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, | +| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ | ++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + + +Getting started +=============== + +To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_: + +.. code-block:: shell + + pip install more-itertools + +The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_ +are included in the top-level package: + +.. code-block:: python + + >>> from more_itertools import flatten + >>> iterable = [(0, 1), (2, 3)] + >>> list(flatten(iterable)) + [0, 1, 2, 3] + +Several new recipes are available as well: + +.. code-block:: python + + >>> from more_itertools import chunked + >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8] + >>> list(chunked(iterable, 3)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8]] + + >>> from more_itertools import spy + >>> iterable = (x * x for x in range(1, 6)) + >>> head, iterable = spy(iterable, n=3) + >>> list(head) + [1, 4, 9] + >>> list(iterable) + [1, 4, 9, 16, 25] + + + +For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/stable/api.html>`_. + + +Links elsewhere +=============== + +Blog posts about ``more-itertools``: + +* `Yo, I heard you like decorators <https://www.bbayles.com/index/decorator_factory>`__ +* `Tour of Python Itertools <https://martinheinz.dev/blog/16>`__ (`Alternate <https://dev.to/martinheinz/tour-of-python-itertools-4122>`__) +* `Real-World Python More Itertools <https://www.gidware.com/real-world-more-itertools/>`_ + + +Development +=========== + +``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_ +and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/more-itertools/more-itertools/graphs/contributors>`_. +If you have a problem or suggestion, please file a bug or pull request in this +repository. Thanks for contributing! 
+ + +Version History +=============== + + + :noindex: + +8.12.0 +------ + +* Bug fixes + * Some documentation issues were fixed (thanks to Masynchin, spookylukey, astrojuanlu, and stephengmatthews) + * Python 3.5 support was temporarily restored (thanks to mattbonnell) + +8.11.0 +------ + +* New functions + * The before_and_after, sliding_window, and triplewise recipes from the Python 3.10 docs were added + * duplicates_everseen and duplicates_justseen (thanks to OrBin and DavidPratt512) + * minmax (thanks to Ricocotam, MSeifert04, and ruancomelli) + * strictly_n (thanks to hwalinga and NotWearingPants) + * unique_in_window + +* Changes to existing functions + * groupby_transform had its type stub improved (thanks to mjk4 and ruancomelli) + * is_sorted now accepts a ``strict`` parameter (thanks to Dutcho and ruancomelli) + * zip_broadcast was updated to fix a bug (thanks to kalekundert) + +8.10.0 +------ + +* Changes to existing functions + * The type stub for iter_except was improved (thanks to MarcinKonowalczyk) + +* Other changes: + * Type stubs now ship with the source release (thanks to saaketp) + * The Sphinx docs were improved (thanks to MarcinKonowalczyk) + +8.9.0 +----- + +* New functions + * interleave_evenly (thanks to mbugert) + * repeat_each (thanks to FinalSh4re) + * chunked_even (thanks to valtron) + * map_if (thanks to sassbalint) + * zip_broadcast (thanks to kalekundert) + +* Changes to existing functions + * The type stub for chunked was improved (thanks to PhilMacKay) + * The type stubs for zip_equal and `zip_offset` were improved (thanks to maffoo) + * Building Sphinx docs locally was improved (thanks to MarcinKonowalczyk) + +8.8.0 +----- + +* New functions + * countable (thanks to krzysieq) + +* Changes to existing functions + * split_before was updated to handle empy collections (thanks to TiunovNN) + * unique_everseen got a performance boost (thanks to Numerlor) + * The type hint for value_chain was corrected (thanks to vr2262) + +8.7.0 +----- + +* New functions + * convolve (from the Python itertools docs) + * product_index, combination_index, and permutation_index (thanks to N8Brooks) + * value_chain (thanks to jenstroeger) + +* Changes to existing functions + * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand) + * pad_none is now the preferred name for padnone, though the latter remains available. + * pairwise will now use the Python standard library implementation on Python 3.10+ + * sort_together now accepts a ``key`` argument (thanks to brianmaissy) + * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis) + * time_limited can now indicate whether its iterator has expired (thanks to roysmith) + * The implementation of unique_everseen was improved (thanks to plammens) + +* Other changes: + * Various documentation updates (thanks to cthoyt, Evantm, and cyphase) + +8.6.0 +----- + +* New itertools + * all_unique (thanks to brianmaissy) + * nth_product and nth_permutation (thanks to N8Brooks) + +* Changes to existing itertools + * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool) + +* Other changes + * Python 3.5 has reached its end of life and is no longer supported. + * Python 3.9 is officially supported. 
+ * Various documentation fixes (thanks to timgates42) + +8.5.0 +----- + +* New itertools + * windowed_complete (thanks to MarcinKonowalczyk) + +* Changes to existing itertools: + * The is_sorted implementation was improved (thanks to cool-RR) + * The groupby_transform now accepts a ``reducefunc`` parameter. + * The last implementation was improved (thanks to brianmaissy) + +* Other changes + * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0) + * The tests for distinct_combinations were improved (thanks to Minabsapi) + * Automated tests now run on GitHub Actions. All commits now check: + * That unit tests pass + * That the examples in docstrings work + * That test coverage remains high (using `coverage`) + * For linting errors (using `flake8`) + * For consistent style (using `black`) + * That the type stubs work (using `mypy`) + * That the docs build correctly (using `sphinx`) + * That packages build correctly (using `twine`) + +8.4.0 +----- + +* New itertools + * mark_ends (thanks to kalekundert) + * is_sorted + +* Changes to existing itertools: + * islice_extended can now be used with real slices (thanks to cool-RR) + * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky) + +* Other changes + * Automated tests now enforce code style (using `black <https://github.com/psf/black>`__) + * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf) + * The test configuration for mypy was updated (thanks to blueyed) + + +8.3.0 +----- + +* New itertools + * zip_equal (thanks to frankier and alexmojaki) + +* Changes to existing itertools: + * split_at, split_before, split_after, and split_when all got a ``maxsplit`` paramter (thanks to jferard and ilai-deutel) + * split_at now accepts a ``keep_separator`` parameter (thanks to jferard) + * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel) + * The windowed implementation was improved (thanks to SergBobrovsky) + * The spy implementation was improved (thanks to has2k1) + +* Other changes + * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel) + * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne) + +8.2.0 +----- + +* Bug fixes + * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel) + +* Changes to existing itertools: + * numeric_range now behaves more like the built-in range. (thanks to jferard) + * bucket now allows for enumerating keys. (thanks to alexchandel) + * sliced now should now work for numpy arrays. (thanks to sswingle) + * seekable now has a ``maxlen`` parameter. + +8.1.0 +----- + +* Bug fixes + * partition works with ``pred=None`` again. (thanks to MSeifert04) + +* New itertools + * sample (thanks to tommyod) + * nth_or_last (thanks to d-ryzhikov) + +* Changes to existing itertools: + * The implementation for divide was improved. (thanks to jferard) + +8.0.2 +----- + +* Bug fixes + * The type stub files are now part of the wheel distribution (thanks to keisheiled) + +8.0.1 +----- + +* Bug fixes + * The type stub files now work for functions imported from the + root package (thanks to keisheiled) + +8.0.0 +----- + +* New itertools and other additions + * This library now ships type hints for use with mypy. 
+ (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance) + * split_when (thanks to jferard) + * repeat_last (thanks to d-ryzhikov) + +* Changes to existing itertools: + * The implementation for set_partitions was improved. (thanks to jferard) + * partition was optimized for expensive predicates. (thanks to stevecj) + * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky) + * The implementation for difference was improved. (thanks to Jabbey92) + +* Other changes + * Python 3.4 has reached its end of life and is no longer supported. + * Python 3.8 is officially supported. (thanks to jdufresne) + * The ``collate`` function has been deprecated. + It raises a ``DeprecationWarning`` if used, and will be removed in a future release. + * one and only now provide more informative error messages. (thanks to gabbard) + * Unit tests were moved outside of the main package (thanks to jdufresne) + * Various documentation fixes (thanks to kriomant, gabbard, jdufresne) + + +7.2.0 +----- + +* New itertools + * distinct_combinations + * set_partitions (thanks to kbarrett) + * filter_except + * map_except + +7.1.0 +----- + +* New itertools + * ichunked (thanks davebelais and youtux) + * only (thanks jaraco) + +* Changes to existing itertools: + * numeric_range now supports ranges specified by + ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests). + * difference now supports an *initial* keyword argument. + + +* Other changes + * Various documentation fixes (thanks raimon49, pylang) + +7.0.0 +----- + +* New itertools: + * time_limited + * partitions (thanks to rominf and Saluev) + * substrings_indexes (thanks to rominf) + +* Changes to existing itertools: + * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet) + +The major version update is due to the change in the default behavior of +collapse. It now treats ``bytes`` objects the same as ``str`` objects. +This aligns its behavior with always_iterable. + +.. code-block:: python + + >>> from more_itertools import collapse + >>> iterable = [[1, 2], b'345', [6]] + >>> print(list(collapse(iterable))) + [1, 2, b'345', 6] + +6.0.0 +----- + +* Major changes: + * Python 2.7 is no longer supported. The 5.0.0 release will be the last + version targeting Python 2.7. + * All future releases will target the active versions of Python 3. + As of 2019, those are Python 3.4 and above. + * The ``six`` library is no longer a dependency. + * The accumulate function is no longer part of this library. You + may import a better version from the standard ``itertools`` module. + +* Changes to existing itertools: + * The order of the parameters in grouper have changed to match + the latest recipe in the itertools documentation. Use of the old order + will be supported in this release, but emit a ``DeprecationWarning``. + The legacy behavior will be dropped in a future release. (thanks to jaraco) + * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values <https://stackoverflow.com/questions/6284396/permutations-with-unique-values>`_ at StackOverflow.) + * An unused parameter was removed from substrings. (thanks to pylang) + +* Other changes: + * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04) + * Several Python 2-isms were removed. 
(thanks to jaraco, MSeifert04, and hugovk) + + diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD new file mode 100644 index 00000000..44847291 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/RECORD @@ -0,0 +1,16 @@ +more_itertools-8.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-8.12.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-8.12.0.dist-info/METADATA,sha256=QCCEcisEPr7iSfBIKCukhP-FbG9ehMK8tDIliZ3FBDc,39405
+more_itertools-8.12.0.dist-info/RECORD,,
+more_itertools-8.12.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+more_itertools-8.12.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15
+more_itertools/__init__.py,sha256=ZQYu_9H6stSG7viUgT32TFqslqcZwq82kWRZooKiI8Y,83
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-310.pyc,,
+more_itertools/__pycache__/more.cpython-310.pyc,,
+more_itertools/__pycache__/recipes.cpython-310.pyc,,
+more_itertools/more.py,sha256=jSrvV9BK-XKa4x7MPPp9yWYRDtRgR5h7yryEqHMU4mg,132578
+more_itertools/more.pyi,sha256=kWOkRKx0V8ZwC1D2j0c0DUfy56dazzpmRcm5ZuY_aqo,20006
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=N6aCDwoIPvE-aiqpGU-nbFwqiM3X8MKRcxBM84naW88,18410
+more_itertools/recipes.pyi,sha256=Lx3vb0p_vY7rF8MQuguvOcVaS9qd1WRL8JO_qVo7hiY,3925
diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt new file mode 100644 index 00000000..a5035bef --- /dev/null +++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +more_itertools diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py new file mode 100644 index 00000000..ea38bef1 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/__init__.py @@ -0,0 +1,4 @@ +from .more import * # noqa +from .recipes import * # noqa + +__version__ = '8.12.0' diff --git a/pkg_resources/_vendor/more_itertools/__init__.pyi b/pkg_resources/_vendor/more_itertools/__init__.pyi new file mode 100644 index 00000000..96f6e36c --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/__init__.pyi @@ -0,0 +1,2 @@ +from .more import * +from .recipes import * diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py new file mode 100644 index 00000000..6b6a5cab --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/more.py @@ -0,0 +1,4316 @@ +import warnings + +from collections import Counter, defaultdict, deque, abc +from collections.abc import Sequence +from functools import partial, reduce, wraps +from heapq import merge, heapify, heapreplace, heappop +from itertools import ( + chain, + compress, + count, + cycle, + dropwhile, + groupby, + islice, + repeat, + starmap, + takewhile, + tee, + zip_longest, +) +from math import exp, factorial, floor, log +from queue import Empty, Queue +from random import random, randrange, uniform +from operator import itemgetter, mul, sub, gt, lt, ge, le +from sys import hexversion, maxsize +from time import monotonic + +from .recipes import ( + consume, + flatten, + pairwise, + powerset, + take, + unique_everseen, +) + +__all__ = [ + 'AbortThread', + 'SequenceView', + 'UnequalIterablesError', + 'adjacent', + 'all_unique', + 'always_iterable', + 'always_reversible', + 'bucket', + 'callback_iter', + 'chunked', + 'chunked_even', + 'circular_shifts', + 'collapse', + 'collate', + 'combination_index', + 'consecutive_groups', + 'consumer', + 'count_cycle', + 'countable', + 'difference', + 'distinct_combinations', + 'distinct_permutations', + 'distribute', + 'divide', + 'duplicates_everseen', + 'duplicates_justseen', + 'exactly_n', + 'filter_except', + 'first', + 'groupby_transform', + 'ichunked', + 'ilen', + 'interleave', + 'interleave_evenly', + 'interleave_longest', + 'intersperse', + 'is_sorted', + 'islice_extended', + 'iterate', + 'last', + 'locate', + 'lstrip', + 'make_decorator', + 'map_except', + 'map_if', + 'map_reduce', + 'mark_ends', + 'minmax', + 'nth_or_last', + 'nth_permutation', + 'nth_product', + 'numeric_range', + 'one', + 'only', + 'padded', + 'partitions', + 'peekable', + 'permutation_index', + 'product_index', + 'raise_', + 'repeat_each', + 'repeat_last', + 'replace', + 'rlocate', + 'rstrip', + 'run_length', + 'sample', + 'seekable', + 'set_partitions', + 'side_effect', + 'sliced', + 'sort_together', + 'split_after', + 'split_at', + 'split_before', + 'split_into', + 
'split_when', + 'spy', + 'stagger', + 'strip', + 'strictly_n', + 'substrings', + 'substrings_indexes', + 'time_limited', + 'unique_in_window', + 'unique_to_each', + 'unzip', + 'value_chain', + 'windowed', + 'windowed_complete', + 'with_iter', + 'zip_broadcast', + 'zip_equal', + 'zip_offset', +] + + +_marker = object() + + +def chunked(iterable, n, strict=False): + """Break *iterable* into lists of length *n*: + + >>> list(chunked([1, 2, 3, 4, 5, 6], 3)) + [[1, 2, 3], [4, 5, 6]] + + By the default, the last yielded list will have fewer than *n* elements + if the length of *iterable* is not divisible by *n*: + + >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3)) + [[1, 2, 3], [4, 5, 6], [7, 8]] + + To use a fill-in value instead, see the :func:`grouper` recipe. + + If the length of *iterable* is not divisible by *n* and *strict* is + ``True``, then ``ValueError`` will be raised before the last + list is yielded. + + """ + iterator = iter(partial(take, n, iter(iterable)), []) + if strict: + if n is None: + raise ValueError('n must not be None when using strict mode.') + + def ret(): + for chunk in iterator: + if len(chunk) != n: + raise ValueError('iterable is not divisible by n.') + yield chunk + + return iter(ret()) + else: + return iterator + + +def first(iterable, default=_marker): + """Return the first item of *iterable*, or *default* if *iterable* is + empty. + + >>> first([0, 1, 2, 3]) + 0 + >>> first([], 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + + :func:`first` is useful when you have a generator of expensive-to-retrieve + values and want any arbitrary one. It is marginally shorter than + ``next(iter(iterable), default)``. + + """ + try: + return next(iter(iterable)) + except StopIteration as e: + if default is _marker: + raise ValueError( + 'first() was called on an empty iterable, and no ' + 'default value was provided.' + ) from e + return default + + +def last(iterable, default=_marker): + """Return the last item of *iterable*, or *default* if *iterable* is + empty. + + >>> last([0, 1, 2, 3]) + 3 + >>> last([], 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + """ + try: + if isinstance(iterable, Sequence): + return iterable[-1] + # Work around https://bugs.python.org/issue38525 + elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0): + return next(reversed(iterable)) + else: + return deque(iterable, maxlen=1)[-1] + except (IndexError, TypeError, StopIteration): + if default is _marker: + raise ValueError( + 'last() was called on an empty iterable, and no default was ' + 'provided.' + ) + return default + + +def nth_or_last(iterable, n, default=_marker): + """Return the nth or the last item of *iterable*, + or *default* if *iterable* is empty. + + >>> nth_or_last([0, 1, 2, 3], 2) + 2 + >>> nth_or_last([0, 1], 2) + 1 + >>> nth_or_last([], 0, 'some default') + 'some default' + + If *default* is not provided and there are no items in the iterable, + raise ``ValueError``. + """ + return last(islice(iterable, n + 1), default=default) + + +class peekable: + """Wrap an iterator to allow lookahead and prepending elements. + + Call :meth:`peek` on the result to get the value that will be returned + by :func:`next`. 
This won't advance the iterator: + + >>> p = peekable(['a', 'b']) + >>> p.peek() + 'a' + >>> next(p) + 'a' + + Pass :meth:`peek` a default value to return that instead of raising + ``StopIteration`` when the iterator is exhausted. + + >>> p = peekable([]) + >>> p.peek('hi') + 'hi' + + peekables also offer a :meth:`prepend` method, which "inserts" items + at the head of the iterable: + + >>> p = peekable([1, 2, 3]) + >>> p.prepend(10, 11, 12) + >>> next(p) + 10 + >>> p.peek() + 11 + >>> list(p) + [11, 12, 1, 2, 3] + + peekables can be indexed. Index 0 is the item that will be returned by + :func:`next`, index 1 is the item after that, and so on: + The values up to the given index will be cached. + + >>> p = peekable(['a', 'b', 'c', 'd']) + >>> p[0] + 'a' + >>> p[1] + 'b' + >>> next(p) + 'a' + + Negative indexes are supported, but be aware that they will cache the + remaining items in the source iterator, which may require significant + storage. + + To check whether a peekable is exhausted, check its truth value: + + >>> p = peekable(['a', 'b']) + >>> if p: # peekable has items + ... list(p) + ['a', 'b'] + >>> if not p: # peekable is exhausted + ... list(p) + [] + + """ + + def __init__(self, iterable): + self._it = iter(iterable) + self._cache = deque() + + def __iter__(self): + return self + + def __bool__(self): + try: + self.peek() + except StopIteration: + return False + return True + + def peek(self, default=_marker): + """Return the item that will be next returned from ``next()``. + + Return ``default`` if there are no items left. If ``default`` is not + provided, raise ``StopIteration``. + + """ + if not self._cache: + try: + self._cache.append(next(self._it)) + except StopIteration: + if default is _marker: + raise + return default + return self._cache[0] + + def prepend(self, *items): + """Stack up items to be the next ones returned from ``next()`` or + ``self.peek()``. The items will be returned in + first in, first out order:: + + >>> p = peekable([1, 2, 3]) + >>> p.prepend(10, 11, 12) + >>> next(p) + 10 + >>> list(p) + [11, 12, 1, 2, 3] + + It is possible, by prepending items, to "resurrect" a peekable that + previously raised ``StopIteration``. + + >>> p = peekable([]) + >>> next(p) + Traceback (most recent call last): + ... + StopIteration + >>> p.prepend(1) + >>> next(p) + 1 + >>> next(p) + Traceback (most recent call last): + ... + StopIteration + + """ + self._cache.extendleft(reversed(items)) + + def __next__(self): + if self._cache: + return self._cache.popleft() + + return next(self._it) + + def _get_slice(self, index): + # Normalize the slice's arguments + step = 1 if (index.step is None) else index.step + if step > 0: + start = 0 if (index.start is None) else index.start + stop = maxsize if (index.stop is None) else index.stop + elif step < 0: + start = -1 if (index.start is None) else index.start + stop = (-maxsize - 1) if (index.stop is None) else index.stop + else: + raise ValueError('slice step cannot be zero') + + # If either the start or stop index is negative, we'll need to cache + # the rest of the iterable in order to slice from the right side. + if (start < 0) or (stop < 0): + self._cache.extend(self._it) + # Otherwise we'll need to find the rightmost index and cache to that + # point. 
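        # For example: with a fresh p = peekable(range(10)), evaluating
        # p[2:5] takes this branch with n = min(max(2, 5) + 1, maxsize) = 6,
        # so items 0..5 are pulled into the cache and [2, 3, 4] is returned;
        # the source iterator is left positioned at 6.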
+ else: + n = min(max(start, stop) + 1, maxsize) + cache_len = len(self._cache) + if n >= cache_len: + self._cache.extend(islice(self._it, n - cache_len)) + + return list(self._cache)[index] + + def __getitem__(self, index): + if isinstance(index, slice): + return self._get_slice(index) + + cache_len = len(self._cache) + if index < 0: + self._cache.extend(self._it) + elif index >= cache_len: + self._cache.extend(islice(self._it, index + 1 - cache_len)) + + return self._cache[index] + + +def collate(*iterables, **kwargs): + """Return a sorted merge of the items from each of several already-sorted + *iterables*. + + >>> list(collate('ACDZ', 'AZ', 'JKL')) + ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] + + Works lazily, keeping only the next value from each iterable in memory. Use + :func:`collate` to, for example, perform a n-way mergesort of items that + don't fit in memory. + + If a *key* function is specified, the iterables will be sorted according + to its result: + + >>> key = lambda s: int(s) # Sort by numeric value, not by string + >>> list(collate(['1', '10'], ['2', '11'], key=key)) + ['1', '2', '10', '11'] + + + If the *iterables* are sorted in descending order, set *reverse* to + ``True``: + + >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) + [5, 4, 3, 2, 1, 0] + + If the elements of the passed-in iterables are out of order, you might get + unexpected results. + + On Python 3.5+, this function is an alias for :func:`heapq.merge`. + + """ + warnings.warn( + "collate is no longer part of more_itertools, use heapq.merge", + DeprecationWarning, + ) + return merge(*iterables, **kwargs) + + +def consumer(func): + """Decorator that automatically advances a PEP-342-style "reverse iterator" + to its first yield point so you don't have to call ``next()`` on it + manually. + + >>> @consumer + ... def tally(): + ... i = 0 + ... while True: + ... print('Thing number %s is %s.' % (i, (yield))) + ... i += 1 + ... + >>> t = tally() + >>> t.send('red') + Thing number 0 is red. + >>> t.send('fish') + Thing number 1 is fish. + + Without the decorator, you would have to call ``next(t)`` before + ``t.send()`` could be used. + + """ + + @wraps(func) + def wrapper(*args, **kwargs): + gen = func(*args, **kwargs) + next(gen) + return gen + + return wrapper + + +def ilen(iterable): + """Return the number of items in *iterable*. + + >>> ilen(x for x in range(1000000) if x % 3 == 0) + 333334 + + This consumes the iterable, so handle with care. + + """ + # This approach was selected because benchmarks showed it's likely the + # fastest of the known implementations at the time of writing. + # See GitHub tracker: #236, #230. + counter = count() + deque(zip(iterable, counter), maxlen=0) + return next(counter) + + +def iterate(func, start): + """Return ``start``, ``func(start)``, ``func(func(start))``, ... + + >>> from itertools import islice + >>> list(islice(iterate(lambda x: 2*x, 1), 10)) + [1, 2, 4, 8, 16, 32, 64, 128, 256, 512] + + """ + while True: + yield start + start = func(start) + + +def with_iter(context_manager): + """Wrap an iterable in a ``with`` statement, so it closes once exhausted. + + For example, this will close the file when the iterator is exhausted:: + + upper_lines = (line.upper() for line in with_iter(open('foo'))) + + Any context manager which returns an iterable is a candidate for + ``with_iter``. 
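
    A minimal runnable sketch, using ``io.StringIO`` as a stand-in for a
    real file; the stream is only closed once the generator is exhausted:

        >>> from io import StringIO
        >>> f = StringIO('hello')
        >>> list(with_iter(f))
        ['hello']
        >>> f.closed
        True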
+ + """ + with context_manager as iterable: + yield from iterable + + +def one(iterable, too_short=None, too_long=None): + """Return the first item from *iterable*, which is expected to contain only + that item. Raise an exception if *iterable* is empty or has more than one + item. + + :func:`one` is useful for ensuring that an iterable contains only one item. + For example, it can be used to retrieve the result of a database query + that is expected to return a single row. + + If *iterable* is empty, ``ValueError`` will be raised. You may specify a + different exception with the *too_short* keyword: + + >>> it = [] + >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too many items in iterable (expected 1)' + >>> too_short = IndexError('too few items') + >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + IndexError: too few items + + Similarly, if *iterable* contains more than one item, ``ValueError`` will + be raised. You may specify a different exception with the *too_long* + keyword: + + >>> it = ['too', 'many'] + >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: Expected exactly one item in iterable, but got 'too', + 'many', and perhaps more. + >>> too_long = RuntimeError + >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + RuntimeError + + Note that :func:`one` attempts to advance *iterable* twice to ensure there + is only one item. See :func:`spy` or :func:`peekable` to check iterable + contents less destructively. + + """ + it = iter(iterable) + + try: + first_value = next(it) + except StopIteration as e: + raise ( + too_short or ValueError('too few items in iterable (expected 1)') + ) from e + + try: + second_value = next(it) + except StopIteration: + pass + else: + msg = ( + 'Expected exactly one item in iterable, but got {!r}, {!r}, ' + 'and perhaps more.'.format(first_value, second_value) + ) + raise too_long or ValueError(msg) + + return first_value + + +def raise_(exception, *args): + raise exception(*args) + + +def strictly_n(iterable, n, too_short=None, too_long=None): + """Validate that *iterable* has exactly *n* items and return them if + it does. If it has fewer than *n* items, call function *too_short* + with those items. If it has more than *n* items, call function + *too_long* with the first ``n + 1`` items. + + >>> iterable = ['a', 'b', 'c', 'd'] + >>> n = 4 + >>> list(strictly_n(iterable, n)) + ['a', 'b', 'c', 'd'] + + By default, *too_short* and *too_long* are functions that raise + ``ValueError``. + + >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too few items in iterable (got 2) + + >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: too many items in iterable (got at least 3) + + You can instead supply functions that do something else. + *too_short* will be called with the number of items in *iterable*. + *too_long* will be called with `n + 1`. + + >>> def too_short(item_count): + ... raise RuntimeError + >>> it = strictly_n('abcd', 6, too_short=too_short) + >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + RuntimeError + + >>> def too_long(item_count): + ... 
print('The boss is going to hear about this') + >>> it = strictly_n('abcdef', 4, too_long=too_long) + >>> list(it) + The boss is going to hear about this + ['a', 'b', 'c', 'd'] + + """ + if too_short is None: + too_short = lambda item_count: raise_( + ValueError, + 'Too few items in iterable (got {})'.format(item_count), + ) + + if too_long is None: + too_long = lambda item_count: raise_( + ValueError, + 'Too many items in iterable (got at least {})'.format(item_count), + ) + + it = iter(iterable) + for i in range(n): + try: + item = next(it) + except StopIteration: + too_short(i) + return + else: + yield item + + try: + next(it) + except StopIteration: + pass + else: + too_long(n + 1) + + +def distinct_permutations(iterable, r=None): + """Yield successive distinct permutations of the elements in *iterable*. + + >>> sorted(distinct_permutations([1, 0, 1])) + [(0, 1, 1), (1, 0, 1), (1, 1, 0)] + + Equivalent to ``set(permutations(iterable))``, except duplicates are not + generated and thrown away. For larger input sequences this is much more + efficient. + + Duplicate permutations arise when there are duplicated elements in the + input iterable. The number of items returned is + `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of + items input, and each `x_i` is the count of a distinct item in the input + sequence. + + If *r* is given, only the *r*-length permutations are yielded. + + >>> sorted(distinct_permutations([1, 0, 1], r=2)) + [(0, 1), (1, 0), (1, 1)] + >>> sorted(distinct_permutations(range(3), r=2)) + [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + + """ + # Algorithm: https://w.wiki/Qai + def _full(A): + while True: + # Yield the permutation we have + yield tuple(A) + + # Find the largest index i such that A[i] < A[i + 1] + for i in range(size - 2, -1, -1): + if A[i] < A[i + 1]: + break + # If no such index exists, this permutation is the last one + else: + return + + # Find the largest index j greater than j such that A[i] < A[j] + for j in range(size - 1, i, -1): + if A[i] < A[j]: + break + + # Swap the value of A[i] with that of A[j], then reverse the + # sequence from A[i + 1] to form the new permutation + A[i], A[j] = A[j], A[i] + A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1] + + # Algorithm: modified from the above + def _partial(A, r): + # Split A into the first r items and the last r items + head, tail = A[:r], A[r:] + right_head_indexes = range(r - 1, -1, -1) + left_tail_indexes = range(len(tail)) + + while True: + # Yield the permutation we have + yield tuple(head) + + # Starting from the right, find the first index of the head with + # value smaller than the maximum value of the tail - call it i. + pivot = tail[-1] + for i in right_head_indexes: + if head[i] < pivot: + break + pivot = head[i] + else: + return + + # Starting from the left, find the first value of the tail + # with a value greater than head[i] and swap. + for j in left_tail_indexes: + if tail[j] > head[i]: + head[i], tail[j] = tail[j], head[i] + break + # If we didn't find one, start from the right and find the first + # index of the head with a value greater than head[i] and swap. 
+ else: + for j in right_head_indexes: + if head[j] > head[i]: + head[i], head[j] = head[j], head[i] + break + + # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)] + tail += head[: i - r : -1] # head[i + 1:][::-1] + i += 1 + head[i:], tail[:] = tail[: r - i], tail[r - i :] + + items = sorted(iterable) + + size = len(items) + if r is None: + r = size + + if 0 < r <= size: + return _full(items) if (r == size) else _partial(items, r) + + return iter(() if r else ((),)) + + +def intersperse(e, iterable, n=1): + """Intersperse filler element *e* among the items in *iterable*, leaving + *n* items between each filler element. + + >>> list(intersperse('!', [1, 2, 3, 4, 5])) + [1, '!', 2, '!', 3, '!', 4, '!', 5] + + >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2)) + [1, 2, None, 3, 4, None, 5] + + """ + if n == 0: + raise ValueError('n must be > 0') + elif n == 1: + # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2... + # islice(..., 1, None) -> x_0, e, x_1, e, x_2... + return islice(interleave(repeat(e), iterable), 1, None) + else: + # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... + # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]... + # flatten(...) -> x_0, x_1, e, x_2, x_3... + filler = repeat([e]) + chunks = chunked(iterable, n) + return flatten(islice(interleave(filler, chunks), 1, None)) + + +def unique_to_each(*iterables): + """Return the elements from each of the input iterables that aren't in the + other input iterables. + + For example, suppose you have a set of packages, each with a set of + dependencies:: + + {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} + + If you remove one package, which dependencies can also be removed? + + If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not + associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for + ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: + + >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) + [['A'], ['C'], ['D']] + + If there are duplicates in one input iterable that aren't in the others + they will be duplicated in the output. Input order is preserved:: + + >>> unique_to_each("mississippi", "missouri") + [['p', 'p'], ['o', 'u', 'r']] + + It is assumed that the elements of each iterable are hashable. + + """ + pool = [list(it) for it in iterables] + counts = Counter(chain.from_iterable(map(set, pool))) + uniques = {element for element in counts if counts[element] == 1} + return [list(filter(uniques.__contains__, it)) for it in pool] + + +def windowed(seq, n, fillvalue=None, step=1): + """Return a sliding window of width *n* over the given iterable. 
+ + >>> all_windows = windowed([1, 2, 3, 4, 5], 3) + >>> list(all_windows) + [(1, 2, 3), (2, 3, 4), (3, 4, 5)] + + When the window is larger than the iterable, *fillvalue* is used in place + of missing values: + + >>> list(windowed([1, 2, 3], 4)) + [(1, 2, 3, None)] + + Each window will advance in increments of *step*: + + >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) + [(1, 2, 3), (3, 4, 5), (5, 6, '!')] + + To slide into the iterable's items, use :func:`chain` to add filler items + to the left: + + >>> iterable = [1, 2, 3, 4] + >>> n = 3 + >>> padding = [None] * (n - 1) + >>> list(windowed(chain(padding, iterable), 3)) + [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] + """ + if n < 0: + raise ValueError('n must be >= 0') + if n == 0: + yield tuple() + return + if step < 1: + raise ValueError('step must be >= 1') + + window = deque(maxlen=n) + i = n + for _ in map(window.append, seq): + i -= 1 + if not i: + i = step + yield tuple(window) + + size = len(window) + if size < n: + yield tuple(chain(window, repeat(fillvalue, n - size))) + elif 0 < i < min(step, n): + window += (fillvalue,) * i + yield tuple(window) + + +def substrings(iterable): + """Yield all of the substrings of *iterable*. + + >>> [''.join(s) for s in substrings('more')] + ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more'] + + Note that non-string iterables can also be subdivided. + + >>> list(substrings([0, 1, 2])) + [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)] + + """ + # The length-1 substrings + seq = [] + for item in iter(iterable): + seq.append(item) + yield (item,) + seq = tuple(seq) + item_count = len(seq) + + # And the rest + for n in range(2, item_count + 1): + for i in range(item_count - n + 1): + yield seq[i : i + n] + + +def substrings_indexes(seq, reverse=False): + """Yield all substrings and their positions in *seq* + + The items yielded will be a tuple of the form ``(substr, i, j)``, where + ``substr == seq[i:j]``. + + This function only works for iterables that support slicing, such as + ``str`` objects. + + >>> for item in substrings_indexes('more'): + ... print(item) + ('m', 0, 1) + ('o', 1, 2) + ('r', 2, 3) + ('e', 3, 4) + ('mo', 0, 2) + ('or', 1, 3) + ('re', 2, 4) + ('mor', 0, 3) + ('ore', 1, 4) + ('more', 0, 4) + + Set *reverse* to ``True`` to yield the same items in the opposite order. + + + """ + r = range(1, len(seq) + 1) + if reverse: + r = reversed(r) + return ( + (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1) + ) + + +class bucket: + """Wrap *iterable* and return an object that buckets it iterable into + child iterables based on a *key* function. + + >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] + >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character + >>> sorted(list(s)) # Get the keys + ['a', 'b', 'c'] + >>> a_iterable = s['a'] + >>> next(a_iterable) + 'a1' + >>> next(a_iterable) + 'a2' + >>> list(s['b']) + ['b1', 'b2', 'b3'] + + The original iterable will be advanced and its items will be cached until + they are used by the child iterables. This may require significant storage. + + By default, attempting to select a bucket to which no items belong will + exhaust the iterable and cache all values. + If you specify a *validator* function, selected buckets will instead be + checked against it. 
+ + >>> from itertools import count + >>> it = count(1, 2) # Infinite sequence of odd numbers + >>> key = lambda x: x % 10 # Bucket by last digit + >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only + >>> s = bucket(it, key=key, validator=validator) + >>> 2 in s + False + >>> list(s[2]) + [] + + """ + + def __init__(self, iterable, key, validator=None): + self._it = iter(iterable) + self._key = key + self._cache = defaultdict(deque) + self._validator = validator or (lambda x: True) + + def __contains__(self, value): + if not self._validator(value): + return False + + try: + item = next(self[value]) + except StopIteration: + return False + else: + self._cache[value].appendleft(item) + + return True + + def _get_values(self, value): + """ + Helper to yield items from the parent iterator that match *value*. + Items that don't match are stored in the local cache as they + are encountered. + """ + while True: + # If we've cached some items that match the target value, emit + # the first one and evict it from the cache. + if self._cache[value]: + yield self._cache[value].popleft() + # Otherwise we need to advance the parent iterator to search for + # a matching item, caching the rest. + else: + while True: + try: + item = next(self._it) + except StopIteration: + return + item_value = self._key(item) + if item_value == value: + yield item + break + elif self._validator(item_value): + self._cache[item_value].append(item) + + def __iter__(self): + for item in self._it: + item_value = self._key(item) + if self._validator(item_value): + self._cache[item_value].append(item) + + yield from self._cache.keys() + + def __getitem__(self, value): + if not self._validator(value): + return iter(()) + + return self._get_values(value) + + +def spy(iterable, n=1): + """Return a 2-tuple with a list containing the first *n* elements of + *iterable*, and an iterator with the same items as *iterable*. + This allows you to "look ahead" at the items in the iterable without + advancing it. + + There is one item in the list by default: + + >>> iterable = 'abcdefg' + >>> head, iterable = spy(iterable) + >>> head + ['a'] + >>> list(iterable) + ['a', 'b', 'c', 'd', 'e', 'f', 'g'] + + You may use unpacking to retrieve items instead of lists: + + >>> (head,), iterable = spy('abcdefg') + >>> head + 'a' + >>> (first, second), iterable = spy('abcdefg', 2) + >>> first + 'a' + >>> second + 'b' + + The number of items requested can be larger than the number of items in + the iterable: + + >>> iterable = [1, 2, 3, 4, 5] + >>> head, iterable = spy(iterable, 10) + >>> head + [1, 2, 3, 4, 5] + >>> list(iterable) + [1, 2, 3, 4, 5] + + """ + it = iter(iterable) + head = take(n, it) + + return head.copy(), chain(head, it) + + +def interleave(*iterables): + """Return a new iterable yielding from each iterable in turn, + until the shortest is exhausted. + + >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8])) + [1, 4, 6, 2, 5, 7] + + For a version that doesn't terminate after the shortest iterable is + exhausted, see :func:`interleave_longest`. + + """ + return chain.from_iterable(zip(*iterables)) + + +def interleave_longest(*iterables): + """Return a new iterable yielding from each iterable in turn, + skipping any that are exhausted. + + >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) + [1, 4, 6, 2, 5, 7, 3, 8] + + This function produces the same output as :func:`roundrobin`, but may + perform better for some inputs (in particular when the number of iterables + is large). 
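
    As a quick illustrative check, the two produce identical output for a
    small input; the performance difference only becomes relevant when many
    iterables are passed:

        >>> from more_itertools import roundrobin
        >>> a = list(interleave_longest('ab', 'c', 'def'))
        >>> b = list(roundrobin('ab', 'c', 'def'))
        >>> a == b
        True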
+ + """ + i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) + return (x for x in i if x is not _marker) + + +def interleave_evenly(iterables, lengths=None): + """ + Interleave multiple iterables so that their elements are evenly distributed + throughout the output sequence. + + >>> iterables = [1, 2, 3, 4, 5], ['a', 'b'] + >>> list(interleave_evenly(iterables)) + [1, 2, 'a', 3, 4, 'b', 5] + + >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]] + >>> list(interleave_evenly(iterables)) + [1, 6, 4, 2, 7, 3, 8, 5] + + This function requires iterables of known length. Iterables without + ``__len__()`` can be used by manually specifying lengths with *lengths*: + + >>> from itertools import combinations, repeat + >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']] + >>> lengths = [4 * (4 - 1) // 2, 3] + >>> list(interleave_evenly(iterables, lengths=lengths)) + [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c'] + + Based on Bresenham's algorithm. + """ + if lengths is None: + try: + lengths = [len(it) for it in iterables] + except TypeError: + raise ValueError( + 'Iterable lengths could not be determined automatically. ' + 'Specify them with the lengths keyword.' + ) + elif len(iterables) != len(lengths): + raise ValueError('Mismatching number of iterables and lengths.') + + dims = len(lengths) + + # sort iterables by length, descending + lengths_permute = sorted( + range(dims), key=lambda i: lengths[i], reverse=True + ) + lengths_desc = [lengths[i] for i in lengths_permute] + iters_desc = [iter(iterables[i]) for i in lengths_permute] + + # the longest iterable is the primary one (Bresenham: the longest + # distance along an axis) + delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:] + iter_primary, iters_secondary = iters_desc[0], iters_desc[1:] + errors = [delta_primary // dims] * len(deltas_secondary) + + to_yield = sum(lengths) + while to_yield: + yield next(iter_primary) + to_yield -= 1 + # update errors for each secondary iterable + errors = [e - delta for e, delta in zip(errors, deltas_secondary)] + + # those iterables for which the error is negative are yielded + # ("diagonal step" in Bresenham) + for i, e in enumerate(errors): + if e < 0: + yield next(iters_secondary[i]) + to_yield -= 1 + errors[i] += delta_primary + + +def collapse(iterable, base_type=None, levels=None): + """Flatten an iterable with multiple levels of nesting (e.g., a list of + lists of tuples) into non-iterable types. + + >>> iterable = [(1, 2), ([3, 4], [[5], [6]])] + >>> list(collapse(iterable)) + [1, 2, 3, 4, 5, 6] + + Binary and text strings are not considered iterable and + will not be collapsed. 
+ + To avoid collapsing other types, specify *base_type*: + + >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']] + >>> list(collapse(iterable, base_type=tuple)) + ['ab', ('cd', 'ef'), 'gh', 'ij'] + + Specify *levels* to stop flattening after a certain level: + + >>> iterable = [('a', ['b']), ('c', ['d'])] + >>> list(collapse(iterable)) # Fully flattened + ['a', 'b', 'c', 'd'] + >>> list(collapse(iterable, levels=1)) # Only one level flattened + ['a', ['b'], 'c', ['d']] + + """ + + def walk(node, level): + if ( + ((levels is not None) and (level > levels)) + or isinstance(node, (str, bytes)) + or ((base_type is not None) and isinstance(node, base_type)) + ): + yield node + return + + try: + tree = iter(node) + except TypeError: + yield node + return + else: + for child in tree: + yield from walk(child, level + 1) + + yield from walk(iterable, 0) + + +def side_effect(func, iterable, chunk_size=None, before=None, after=None): + """Invoke *func* on each item in *iterable* (or on each *chunk_size* group + of items) before yielding the item. + + `func` must be a function that takes a single argument. Its return value + will be discarded. + + *before* and *after* are optional functions that take no arguments. They + will be executed before iteration starts and after it ends, respectively. + + `side_effect` can be used for logging, updating progress bars, or anything + that is not functionally "pure." + + Emitting a status message: + + >>> from more_itertools import consume + >>> func = lambda item: print('Received {}'.format(item)) + >>> consume(side_effect(func, range(2))) + Received 0 + Received 1 + + Operating on chunks of items: + + >>> pair_sums = [] + >>> func = lambda chunk: pair_sums.append(sum(chunk)) + >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2)) + [0, 1, 2, 3, 4, 5] + >>> list(pair_sums) + [1, 5, 9] + + Writing to a file-like object: + + >>> from io import StringIO + >>> from more_itertools import consume + >>> f = StringIO() + >>> func = lambda x: print(x, file=f) + >>> before = lambda: print(u'HEADER', file=f) + >>> after = f.close + >>> it = [u'a', u'b', u'c'] + >>> consume(side_effect(func, it, before=before, after=after)) + >>> f.closed + True + + """ + try: + if before is not None: + before() + + if chunk_size is None: + for item in iterable: + func(item) + yield item + else: + for chunk in chunked(iterable, chunk_size): + func(chunk) + yield from chunk + finally: + if after is not None: + after() + + +def sliced(seq, n, strict=False): + """Yield slices of length *n* from the sequence *seq*. + + >>> list(sliced((1, 2, 3, 4, 5, 6), 3)) + [(1, 2, 3), (4, 5, 6)] + + By the default, the last yielded slice will have fewer than *n* elements + if the length of *seq* is not divisible by *n*: + + >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3)) + [(1, 2, 3), (4, 5, 6), (7, 8)] + + If the length of *seq* is not divisible by *n* and *strict* is + ``True``, then ``ValueError`` will be raised before the last + slice is yielded. + + This function will only work for iterables that support slicing. + For non-sliceable iterables, see :func:`chunked`. 
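
    A short sketch of the *strict* behavior described above:

        >>> list(sliced((1, 2, 3, 4), 3, strict=True))  # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ...
        ValueError: seq is not divisible by n.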
+ + """ + iterator = takewhile(len, (seq[i : i + n] for i in count(0, n))) + if strict: + + def ret(): + for _slice in iterator: + if len(_slice) != n: + raise ValueError("seq is not divisible by n.") + yield _slice + + return iter(ret()) + else: + return iterator + + +def split_at(iterable, pred, maxsplit=-1, keep_separator=False): + """Yield lists of items from *iterable*, where each list is delimited by + an item where callable *pred* returns ``True``. + + >>> list(split_at('abcdcba', lambda x: x == 'b')) + [['a'], ['c', 'd', 'c'], ['a']] + + >>> list(split_at(range(10), lambda n: n % 2 == 1)) + [[0], [2], [4], [6], [8], []] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2)) + [[0], [2], [4, 5, 6, 7, 8, 9]] + + By default, the delimiting items are not included in the output. + The include them, set *keep_separator* to ``True``. + + >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True)) + [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']] + + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + if pred(item): + yield buf + if keep_separator: + yield [item] + if maxsplit == 1: + yield list(it) + return + buf = [] + maxsplit -= 1 + else: + buf.append(item) + yield buf + + +def split_before(iterable, pred, maxsplit=-1): + """Yield lists of items from *iterable*, where each list ends just before + an item for which callable *pred* returns ``True``: + + >>> list(split_before('OneTwo', lambda s: s.isupper())) + [['O', 'n', 'e'], ['T', 'w', 'o']] + + >>> list(split_before(range(10), lambda n: n % 3 == 0)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]] + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + if pred(item) and buf: + yield buf + if maxsplit == 1: + yield [item] + list(it) + return + buf = [] + maxsplit -= 1 + buf.append(item) + if buf: + yield buf + + +def split_after(iterable, pred, maxsplit=-1): + """Yield lists of items from *iterable*, where each list ends with an + item where callable *pred* returns ``True``: + + >>> list(split_after('one1two2', lambda s: s.isdigit())) + [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']] + + >>> list(split_after(range(10), lambda n: n % 3 == 0)) + [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2)) + [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]] + + """ + if maxsplit == 0: + yield list(iterable) + return + + buf = [] + it = iter(iterable) + for item in it: + buf.append(item) + if pred(item) and buf: + yield buf + if maxsplit == 1: + yield list(it) + return + buf = [] + maxsplit -= 1 + if buf: + yield buf + + +def split_when(iterable, pred, maxsplit=-1): + """Split *iterable* into pieces based on the output of *pred*. + *pred* should be a function that takes successive pairs of items and + returns ``True`` if the iterable should be split in between them. 
+ + For example, to find runs of increasing numbers, split the iterable when + element ``i`` is larger than element ``i + 1``: + + >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y)) + [[1, 2, 3, 3], [2, 5], [2, 4], [2]] + + At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, + then there is no limit on the number of splits: + + >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], + ... lambda x, y: x > y, maxsplit=2)) + [[1, 2, 3, 3], [2, 5], [2, 4, 2]] + + """ + if maxsplit == 0: + yield list(iterable) + return + + it = iter(iterable) + try: + cur_item = next(it) + except StopIteration: + return + + buf = [cur_item] + for next_item in it: + if pred(cur_item, next_item): + yield buf + if maxsplit == 1: + yield [next_item] + list(it) + return + buf = [] + maxsplit -= 1 + + buf.append(next_item) + cur_item = next_item + + yield buf + + +def split_into(iterable, sizes): + """Yield a list of sequential items from *iterable* of length 'n' for each + integer 'n' in *sizes*. + + >>> list(split_into([1,2,3,4,5,6], [1,2,3])) + [[1], [2, 3], [4, 5, 6]] + + If the sum of *sizes* is smaller than the length of *iterable*, then the + remaining items of *iterable* will not be returned. + + >>> list(split_into([1,2,3,4,5,6], [2,3])) + [[1, 2], [3, 4, 5]] + + If the sum of *sizes* is larger than the length of *iterable*, fewer items + will be returned in the iteration that overruns *iterable* and further + lists will be empty: + + >>> list(split_into([1,2,3,4], [1,2,3,4])) + [[1], [2, 3], [4], []] + + When a ``None`` object is encountered in *sizes*, the returned list will + contain items up to the end of *iterable* the same way that itertools.slice + does: + + >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None])) + [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]] + + :func:`split_into` can be useful for grouping a series of items where the + sizes of the groups are not uniform. An example would be where in a row + from a table, multiple columns represent elements of the same feature + (e.g. a point represented by x,y,z) but, the format is not the same for + all columns. + """ + # convert the iterable argument into an iterator so its contents can + # be consumed by islice in case it is a generator + it = iter(iterable) + + for size in sizes: + if size is None: + yield list(it) + return + else: + yield list(islice(it, size)) + + +def padded(iterable, fillvalue=None, n=None, next_multiple=False): + """Yield the elements from *iterable*, followed by *fillvalue*, such that + at least *n* items are emitted. + + >>> list(padded([1, 2, 3], '?', 5)) + [1, 2, 3, '?', '?'] + + If *next_multiple* is ``True``, *fillvalue* will be emitted until the + number of items emitted is a multiple of *n*:: + + >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) + [1, 2, 3, 4, None, None] + + If *n* is ``None``, *fillvalue* will be emitted indefinitely. + + """ + it = iter(iterable) + if n is None: + yield from chain(it, repeat(fillvalue)) + elif n < 1: + raise ValueError('n must be at least 1') + else: + item_count = 0 + for item in it: + yield item + item_count += 1 + + remaining = (n - item_count) % n if next_multiple else n - item_count + for _ in range(remaining): + yield fillvalue + + +def repeat_each(iterable, n=2): + """Repeat each element in *iterable* *n* times. 
+ + >>> list(repeat_each('ABC', 3)) + ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C'] + """ + return chain.from_iterable(map(repeat, iterable, repeat(n))) + + +def repeat_last(iterable, default=None): + """After the *iterable* is exhausted, keep yielding its last element. + + >>> list(islice(repeat_last(range(3)), 5)) + [0, 1, 2, 2, 2] + + If the iterable is empty, yield *default* forever:: + + >>> list(islice(repeat_last(range(0), 42), 5)) + [42, 42, 42, 42, 42] + + """ + item = _marker + for item in iterable: + yield item + final = default if item is _marker else item + yield from repeat(final) + + +def distribute(n, iterable): + """Distribute the items from *iterable* among *n* smaller iterables. + + >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6]) + >>> list(group_1) + [1, 3, 5] + >>> list(group_2) + [2, 4, 6] + + If the length of *iterable* is not evenly divisible by *n*, then the + length of the returned iterables will not be identical: + + >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7]) + >>> [list(c) for c in children] + [[1, 4, 7], [2, 5], [3, 6]] + + If the length of *iterable* is smaller than *n*, then the last returned + iterables will be empty: + + >>> children = distribute(5, [1, 2, 3]) + >>> [list(c) for c in children] + [[1], [2], [3], [], []] + + This function uses :func:`itertools.tee` and may require significant + storage. If you need the order items in the smaller iterables to match the + original iterable, see :func:`divide`. + + """ + if n < 1: + raise ValueError('n must be at least 1') + + children = tee(iterable, n) + return [islice(it, index, None, n) for index, it in enumerate(children)] + + +def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): + """Yield tuples whose elements are offset from *iterable*. + The amount by which the `i`-th item in each tuple is offset is given by + the `i`-th item in *offsets*. + + >>> list(stagger([0, 1, 2, 3])) + [(None, 0, 1), (0, 1, 2), (1, 2, 3)] + >>> list(stagger(range(8), offsets=(0, 2, 4))) + [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)] + + By default, the sequence will end when the final element of a tuple is the + last item in the iterable. To continue until the first element of a tuple + is the last item in the iterable, set *longest* to ``True``:: + + >>> list(stagger([0, 1, 2, 3], longest=True)) + [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)] + + By default, ``None`` will be used to replace offsets beyond the end of the + sequence. Specify *fillvalue* to use some other value. + + """ + children = tee(iterable, len(offsets)) + + return zip_offset( + *children, offsets=offsets, longest=longest, fillvalue=fillvalue + ) + + +class UnequalIterablesError(ValueError): + def __init__(self, details=None): + msg = 'Iterables have different lengths' + if details is not None: + msg += (': index 0 has length {}; index {} has length {}').format( + *details + ) + + super().__init__(msg) + + +def _zip_equal_generator(iterables): + for combo in zip_longest(*iterables, fillvalue=_marker): + for val in combo: + if val is _marker: + raise UnequalIterablesError() + yield combo + + +def _zip_equal(*iterables): + # Check whether the iterables are all the same size. + try: + first_size = len(iterables[0]) + for i, it in enumerate(iterables[1:], 1): + size = len(it) + if size != first_size: + break + else: + # If we didn't break out, we can use the built-in zip. + return zip(*iterables) + + # If we did break out, there was a mismatch. 
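        # For example: _zip_equal([1, 2, 3], 'ab') reaches this point with
        # details=(3, 1, 2) and raises "Iterables have different lengths:
        # index 0 has length 3; index 1 has length 2".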
+ raise UnequalIterablesError(details=(first_size, i, size)) + # If any one of the iterables didn't have a length, start reading + # them until one runs out. + except TypeError: + return _zip_equal_generator(iterables) + + +def zip_equal(*iterables): + """``zip`` the input *iterables* together, but raise + ``UnequalIterablesError`` if they aren't all the same length. + + >>> it_1 = range(3) + >>> it_2 = iter('abc') + >>> list(zip_equal(it_1, it_2)) + [(0, 'a'), (1, 'b'), (2, 'c')] + + >>> it_1 = range(3) + >>> it_2 = iter('abcd') + >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + more_itertools.more.UnequalIterablesError: Iterables have different + lengths + + """ + if hexversion >= 0x30A00A6: + warnings.warn( + ( + 'zip_equal will be removed in a future version of ' + 'more-itertools. Use the builtin zip function with ' + 'strict=True instead.' + ), + DeprecationWarning, + ) + + return _zip_equal(*iterables) + + +def zip_offset(*iterables, offsets, longest=False, fillvalue=None): + """``zip`` the input *iterables* together, but offset the `i`-th iterable + by the `i`-th item in *offsets*. + + >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1))) + [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')] + + This can be used as a lightweight alternative to SciPy or pandas to analyze + data sets in which some series have a lead or lag relationship. + + By default, the sequence will end when the shortest iterable is exhausted. + To continue until the longest iterable is exhausted, set *longest* to + ``True``. + + >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True)) + [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')] + + By default, ``None`` will be used to replace offsets beyond the end of the + sequence. Specify *fillvalue* to use some other value. + + """ + if len(iterables) != len(offsets): + raise ValueError("Number of iterables and offsets didn't match") + + staggered = [] + for it, n in zip(iterables, offsets): + if n < 0: + staggered.append(chain(repeat(fillvalue, -n), it)) + elif n > 0: + staggered.append(islice(it, n, None)) + else: + staggered.append(it) + + if longest: + return zip_longest(*staggered, fillvalue=fillvalue) + + return zip(*staggered) + + +def sort_together(iterables, key_list=(0,), key=None, reverse=False): + """Return the input iterables sorted together, with *key_list* as the + priority for sorting. All iterables are trimmed to the length of the + shortest one. + + This can be used like the sorting function in a spreadsheet. If each + iterable represents a column of data, the key list determines which + columns are used for sorting. + + By default, all iterables are sorted using the ``0``-th iterable:: + + >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')] + >>> sort_together(iterables) + [(1, 2, 3, 4), ('d', 'c', 'b', 'a')] + + Set a different key list to sort according to another iterable. + Specifying multiple keys dictates how ties are broken:: + + >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')] + >>> sort_together(iterables, key_list=(1, 2)) + [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')] + + To sort by a function of the elements of the iterable, pass a *key* + function. Its arguments are the elements of the iterables corresponding to + the key list:: + + >>> names = ('a', 'b', 'c') + >>> lengths = (1, 2, 3) + >>> widths = (5, 2, 1) + >>> def area(length, width): + ... 
return length * width + >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area) + [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)] + + Set *reverse* to ``True`` to sort in descending order. + + >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True) + [(3, 2, 1), ('a', 'b', 'c')] + + """ + if key is None: + # if there is no key function, the key argument to sorted is an + # itemgetter + key_argument = itemgetter(*key_list) + else: + # if there is a key function, call it with the items at the offsets + # specified by the key function as arguments + key_list = list(key_list) + if len(key_list) == 1: + # if key_list contains a single item, pass the item at that offset + # as the only argument to the key function + key_offset = key_list[0] + key_argument = lambda zipped_items: key(zipped_items[key_offset]) + else: + # if key_list contains multiple items, use itemgetter to return a + # tuple of items, which we pass as *args to the key function + get_key_items = itemgetter(*key_list) + key_argument = lambda zipped_items: key( + *get_key_items(zipped_items) + ) + + return list( + zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse)) + ) + + +def unzip(iterable): + """The inverse of :func:`zip`, this function disaggregates the elements + of the zipped *iterable*. + + The ``i``-th iterable contains the ``i``-th element from each element + of the zipped iterable. The first element is used to to determine the + length of the remaining elements. + + >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + >>> letters, numbers = unzip(iterable) + >>> list(letters) + ['a', 'b', 'c', 'd'] + >>> list(numbers) + [1, 2, 3, 4] + + This is similar to using ``zip(*iterable)``, but it avoids reading + *iterable* into memory. Note, however, that this function uses + :func:`itertools.tee` and thus may require significant storage. + + """ + head, iterable = spy(iter(iterable)) + if not head: + # empty iterable, e.g. zip([], [], []) + return () + # spy returns a one-length iterable as head + head = head[0] + iterables = tee(iterable, len(head)) + + def itemgetter(i): + def getter(obj): + try: + return obj[i] + except IndexError: + # basically if we have an iterable like + # iter([(1, 2, 3), (4, 5), (6,)]) + # the second unzipped iterable would fail at the third tuple + # since it would try to access tup[1] + # same with the third unzipped iterable and the second tuple + # to support these "improperly zipped" iterables, + # we create a custom itemgetter + # which just stops the unzipped iterables + # at first length mismatch + raise StopIteration + + return getter + + return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables)) + + +def divide(n, iterable): + """Divide the elements from *iterable* into *n* parts, maintaining + order. + + >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6]) + >>> list(group_1) + [1, 2, 3] + >>> list(group_2) + [4, 5, 6] + + If the length of *iterable* is not evenly divisible by *n*, then the + length of the returned iterables will not be identical: + + >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7]) + >>> [list(c) for c in children] + [[1, 2, 3], [4, 5], [6, 7]] + + If the length of the iterable is smaller than n, then the last returned + iterables will be empty: + + >>> children = divide(5, [1, 2, 3]) + >>> [list(c) for c in children] + [[1], [2], [3], [], []] + + This function will exhaust the iterable before returning and may require + significant storage. 
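+
+ Because the input is pulled into a sequence first, the returned groups
+ are independent and may be consumed in any order:
+
+ >>> a, b = divide(2, iter([1, 2, 3, 4]))
+ >>> list(b), list(a)
+ ([3, 4], [1, 2])
+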
If order is not important, see :func:`distribute`, + which does not first pull the iterable into memory. + + """ + if n < 1: + raise ValueError('n must be at least 1') + + try: + iterable[:0] + except TypeError: + seq = tuple(iterable) + else: + seq = iterable + + q, r = divmod(len(seq), n) + + ret = [] + stop = 0 + for i in range(1, n + 1): + start = stop + stop += q + 1 if i <= r else q + ret.append(iter(seq[start:stop])) + + return ret + + +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) + + +def adjacent(predicate, iterable, distance=1): + """Return an iterable over `(bool, item)` tuples where the `item` is + drawn from *iterable* and the `bool` indicates whether + that item satisfies the *predicate* or is adjacent to an item that does. + + For example, to find whether items are adjacent to a ``3``:: + + >>> list(adjacent(lambda x: x == 3, range(6))) + [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)] + + Set *distance* to change what counts as adjacent. For example, to find + whether items are two places away from a ``3``: + + >>> list(adjacent(lambda x: x == 3, range(6), distance=2)) + [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)] + + This is useful for contextualizing the results of a search function. + For example, a code comparison tool might want to identify lines that + have changed, but also surrounding lines to give the viewer of the diff + context. + + The predicate function will only be called once for each item in the + iterable. + + See also :func:`groupby_transform`, which can be used with this function + to group ranges of items with the same `bool` value. + + """ + # Allow distance=0 mainly for testing that it reproduces results with map() + if distance < 0: + raise ValueError('distance must be at least 0') + + i1, i2 = tee(iterable) + padding = [False] * distance + selected = chain(padding, map(predicate, i1), padding) + adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1)) + return zip(adjacent_to_selected, i2) + + +def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None): + """An extension of :func:`itertools.groupby` that can apply transformations + to the grouped data. 
+ + * *keyfunc* is a function computing a key value for each item in *iterable* + * *valuefunc* is a function that transforms the individual items from + *iterable* after grouping + * *reducefunc* is a function that transforms each group of items + + >>> iterable = 'aAAbBBcCC' + >>> keyfunc = lambda k: k.upper() + >>> valuefunc = lambda v: v.lower() + >>> reducefunc = lambda g: ''.join(g) + >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc)) + [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')] + + Each optional argument defaults to an identity function if not specified. + + :func:`groupby_transform` is useful when grouping elements of an iterable + using a separate iterable as the key. To do this, :func:`zip` the iterables + and pass a *keyfunc* that extracts the first element and a *valuefunc* + that extracts the second element:: + + >>> from operator import itemgetter + >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3] + >>> values = 'abcdefghi' + >>> iterable = zip(keys, values) + >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1)) + >>> [(k, ''.join(g)) for k, g in grouper] + [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')] + + Note that the order of items in the iterable is significant. + Only adjacent items are grouped together, so if you don't want any + duplicate groups, you should sort the iterable by the key function. + + """ + ret = groupby(iterable, keyfunc) + if valuefunc: + ret = ((k, map(valuefunc, g)) for k, g in ret) + if reducefunc: + ret = ((k, reducefunc(g)) for k, g in ret) + + return ret + + +class numeric_range(abc.Sequence, abc.Hashable): + """An extension of the built-in ``range()`` function whose arguments can + be any orderable numeric type. + + With only *stop* specified, *start* defaults to ``0`` and *step* + defaults to ``1``. The output items will match the type of *stop*: + + >>> list(numeric_range(3.5)) + [0.0, 1.0, 2.0, 3.0] + + With only *start* and *stop* specified, *step* defaults to ``1``. The + output items will match the type of *start*: + + >>> from decimal import Decimal + >>> start = Decimal('2.1') + >>> stop = Decimal('5.1') + >>> list(numeric_range(start, stop)) + [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')] + + With *start*, *stop*, and *step* specified the output items will match + the type of ``start + step``: + + >>> from fractions import Fraction + >>> start = Fraction(1, 2) # Start at 1/2 + >>> stop = Fraction(5, 2) # End at 5/2 + >>> step = Fraction(1, 2) # Count by 1/2 + >>> list(numeric_range(start, stop, step)) + [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)] + + If *step* is zero, ``ValueError`` is raised. Negative steps are supported: + + >>> list(numeric_range(3, -1, -1.0)) + [3.0, 2.0, 1.0, 0.0] + + Be aware of the limitations of floating point numbers; the representation + of the yielded numbers may be surprising. 
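+ For instance:
+
+ >>> list(numeric_range(0, 0.4, 0.1))
+ [0.0, 0.1, 0.2, 0.30000000000000004]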
+ + ``datetime.datetime`` objects can be used for *start* and *stop*, if *step* + is a ``datetime.timedelta`` object: + + >>> import datetime + >>> start = datetime.datetime(2019, 1, 1) + >>> stop = datetime.datetime(2019, 1, 3) + >>> step = datetime.timedelta(days=1) + >>> items = iter(numeric_range(start, stop, step)) + >>> next(items) + datetime.datetime(2019, 1, 1, 0, 0) + >>> next(items) + datetime.datetime(2019, 1, 2, 0, 0) + + """ + + _EMPTY_HASH = hash(range(0, 0)) + + def __init__(self, *args): + argc = len(args) + if argc == 1: + (self._stop,) = args + self._start = type(self._stop)(0) + self._step = type(self._stop - self._start)(1) + elif argc == 2: + self._start, self._stop = args + self._step = type(self._stop - self._start)(1) + elif argc == 3: + self._start, self._stop, self._step = args + elif argc == 0: + raise TypeError( + 'numeric_range expected at least ' + '1 argument, got {}'.format(argc) + ) + else: + raise TypeError( + 'numeric_range expected at most ' + '3 arguments, got {}'.format(argc) + ) + + self._zero = type(self._step)(0) + if self._step == self._zero: + raise ValueError('numeric_range() arg 3 must not be zero') + self._growing = self._step > self._zero + self._init_len() + + def __bool__(self): + if self._growing: + return self._start < self._stop + else: + return self._start > self._stop + + def __contains__(self, elem): + if self._growing: + if self._start <= elem < self._stop: + return (elem - self._start) % self._step == self._zero + else: + if self._start >= elem > self._stop: + return (self._start - elem) % (-self._step) == self._zero + + return False + + def __eq__(self, other): + if isinstance(other, numeric_range): + empty_self = not bool(self) + empty_other = not bool(other) + if empty_self or empty_other: + return empty_self and empty_other # True if both empty + else: + return ( + self._start == other._start + and self._step == other._step + and self._get_by_index(-1) == other._get_by_index(-1) + ) + else: + return False + + def __getitem__(self, key): + if isinstance(key, int): + return self._get_by_index(key) + elif isinstance(key, slice): + step = self._step if key.step is None else key.step * self._step + + if key.start is None or key.start <= -self._len: + start = self._start + elif key.start >= self._len: + start = self._stop + else: # -self._len < key.start < self._len + start = self._get_by_index(key.start) + + if key.stop is None or key.stop >= self._len: + stop = self._stop + elif key.stop <= -self._len: + stop = self._start + else: # -self._len < key.stop < self._len + stop = self._get_by_index(key.stop) + + return numeric_range(start, stop, step) + else: + raise TypeError( + 'numeric range indices must be ' + 'integers or slices, not {}'.format(type(key).__name__) + ) + + def __hash__(self): + if self: + return hash((self._start, self._get_by_index(-1), self._step)) + else: + return self._EMPTY_HASH + + def __iter__(self): + values = (self._start + (n * self._step) for n in count()) + if self._growing: + return takewhile(partial(gt, self._stop), values) + else: + return takewhile(partial(lt, self._stop), values) + + def __len__(self): + return self._len + + def _init_len(self): + if self._growing: + start = self._start + stop = self._stop + step = self._step + else: + start = self._stop + stop = self._start + step = -self._step + distance = stop - start + if distance <= self._zero: + self._len = 0 + else: # distance > 0 and step > 0: regular euclidean division + q, r = divmod(distance, step) + self._len = int(q) + int(r != 
self._zero) + + def __reduce__(self): + return numeric_range, (self._start, self._stop, self._step) + + def __repr__(self): + if self._step == 1: + return "numeric_range({}, {})".format( + repr(self._start), repr(self._stop) + ) + else: + return "numeric_range({}, {}, {})".format( + repr(self._start), repr(self._stop), repr(self._step) + ) + + def __reversed__(self): + return iter( + numeric_range( + self._get_by_index(-1), self._start - self._step, -self._step + ) + ) + + def count(self, value): + return int(value in self) + + def index(self, value): + if self._growing: + if self._start <= value < self._stop: + q, r = divmod(value - self._start, self._step) + if r == self._zero: + return int(q) + else: + if self._start >= value > self._stop: + q, r = divmod(self._start - value, -self._step) + if r == self._zero: + return int(q) + + raise ValueError("{} is not in numeric range".format(value)) + + def _get_by_index(self, i): + if i < 0: + i += self._len + if i < 0 or i >= self._len: + raise IndexError("numeric range object index out of range") + return self._start + i * self._step + + +def count_cycle(iterable, n=None): + """Cycle through the items from *iterable* up to *n* times, yielding + the number of completed cycles along with each item. If *n* is omitted the + process repeats indefinitely. + + >>> list(count_cycle('AB', 3)) + [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')] + + """ + iterable = tuple(iterable) + if not iterable: + return iter(()) + counter = count() if n is None else range(n) + return ((i, item) for i in counter for item in iterable) + + +def mark_ends(iterable): + """Yield 3-tuples of the form ``(is_first, is_last, item)``. + + >>> list(mark_ends('ABC')) + [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')] + + Use this when looping over an iterable to take special action on its first + and/or last items: + + >>> iterable = ['Header', 100, 200, 'Footer'] + >>> total = 0 + >>> for is_first, is_last, item in mark_ends(iterable): + ... if is_first: + ... continue # Skip the header + ... if is_last: + ... continue # Skip the footer + ... total += item + >>> print(total) + 300 + """ + it = iter(iterable) + + try: + b = next(it) + except StopIteration: + return + + try: + for i in count(): + a = b + b = next(it) + yield i == 0, False, a + + except StopIteration: + yield i == 0, True, a + + +def locate(iterable, pred=bool, window_size=None): + """Yield the index of each item in *iterable* for which *pred* returns + ``True``. + + *pred* defaults to :func:`bool`, which will select truthy items: + + >>> list(locate([0, 1, 1, 0, 1, 0, 0])) + [1, 2, 4] + + Set *pred* to a custom function to, e.g., find the indexes for a particular + item. + + >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b')) + [1, 3] + + If *window_size* is given, then the *pred* function will be called with + that many items. 
This enables searching for sub-sequences: + + >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] + >>> pred = lambda *args: args == (1, 2, 3) + >>> list(locate(iterable, pred=pred, window_size=3)) + [1, 5, 9] + + Use with :func:`seekable` to find indexes and then retrieve the associated + items: + + >>> from itertools import count + >>> from more_itertools import seekable + >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count()) + >>> it = seekable(source) + >>> pred = lambda x: x > 100 + >>> indexes = locate(it, pred=pred) + >>> i = next(indexes) + >>> it.seek(i) + >>> next(it) + 106 + + """ + if window_size is None: + return compress(count(), map(pred, iterable)) + + if window_size < 1: + raise ValueError('window size must be at least 1') + + it = windowed(iterable, window_size, fillvalue=_marker) + return compress(count(), starmap(pred, it)) + + +def lstrip(iterable, pred): + """Yield the items from *iterable*, but strip any from the beginning + for which *pred* returns ``True``. + + For example, to remove a set of items from the start of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(lstrip(iterable, pred)) + [1, 2, None, 3, False, None] + + This function is analogous to to :func:`str.lstrip`, and is essentially + an wrapper for :func:`itertools.dropwhile`. + + """ + return dropwhile(pred, iterable) + + +def rstrip(iterable, pred): + """Yield the items from *iterable*, but strip any from the end + for which *pred* returns ``True``. + + For example, to remove a set of items from the end of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(rstrip(iterable, pred)) + [None, False, None, 1, 2, None, 3] + + This function is analogous to :func:`str.rstrip`. + + """ + cache = [] + cache_append = cache.append + cache_clear = cache.clear + for x in iterable: + if pred(x): + cache_append(x) + else: + yield from cache + cache_clear() + yield x + + +def strip(iterable, pred): + """Yield the items from *iterable*, but strip any from the + beginning and end for which *pred* returns ``True``. + + For example, to remove a set of items from both ends of an iterable: + + >>> iterable = (None, False, None, 1, 2, None, 3, False, None) + >>> pred = lambda x: x in {None, False, ''} + >>> list(strip(iterable, pred)) + [1, 2, None, 3] + + This function is analogous to :func:`str.strip`. + + """ + return rstrip(lstrip(iterable, pred), pred) + + +class islice_extended: + """An extension of :func:`itertools.islice` that supports negative values + for *stop*, *start*, and *step*. + + >>> iterable = iter('abcdefgh') + >>> list(islice_extended(iterable, -4, -1)) + ['e', 'f', 'g'] + + Slices with negative values require some caching of *iterable*, but this + function takes care to minimize the amount of memory required. 
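+
+ A negative *stop* also works on ordinary finite iterables:
+
+ >>> list(islice_extended('abcdefgh', 1, -2))
+ ['b', 'c', 'd', 'e', 'f']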
+ + For example, you can use a negative step with an infinite iterator: + + >>> from itertools import count + >>> list(islice_extended(count(), 110, 99, -2)) + [110, 108, 106, 104, 102, 100] + + You can also use slice notation directly: + + >>> iterable = map(str, count()) + >>> it = islice_extended(iterable)[10:20:2] + >>> list(it) + ['10', '12', '14', '16', '18'] + + """ + + def __init__(self, iterable, *args): + it = iter(iterable) + if args: + self._iterable = _islice_helper(it, slice(*args)) + else: + self._iterable = it + + def __iter__(self): + return self + + def __next__(self): + return next(self._iterable) + + def __getitem__(self, key): + if isinstance(key, slice): + return islice_extended(_islice_helper(self._iterable, key)) + + raise TypeError('islice_extended.__getitem__ argument must be a slice') + + +def _islice_helper(it, s): + start = s.start + stop = s.stop + if s.step == 0: + raise ValueError('step argument must be a non-zero integer or None.') + step = s.step or 1 + + if step > 0: + start = 0 if (start is None) else start + + if start < 0: + # Consume all but the last -start items + cache = deque(enumerate(it, 1), maxlen=-start) + len_iter = cache[-1][0] if cache else 0 + + # Adjust start to be positive + i = max(len_iter + start, 0) + + # Adjust stop to be positive + if stop is None: + j = len_iter + elif stop >= 0: + j = min(stop, len_iter) + else: + j = max(len_iter + stop, 0) + + # Slice the cache + n = j - i + if n <= 0: + return + + for index, item in islice(cache, 0, n, step): + yield item + elif (stop is not None) and (stop < 0): + # Advance to the start position + next(islice(it, start, start), None) + + # When stop is negative, we have to carry -stop items while + # iterating + cache = deque(islice(it, -stop), maxlen=-stop) + + for index, item in enumerate(it): + cached_item = cache.popleft() + if index % step == 0: + yield cached_item + cache.append(item) + else: + # When both start and stop are positive we have the normal case + yield from islice(it, start, stop, step) + else: + start = -1 if (start is None) else start + + if (stop is not None) and (stop < 0): + # Consume all but the last items + n = -stop - 1 + cache = deque(enumerate(it, 1), maxlen=n) + len_iter = cache[-1][0] if cache else 0 + + # If start and stop are both negative they are comparable and + # we can just slice. Otherwise we can adjust start to be negative + # and then slice. + if start < 0: + i, j = start, stop + else: + i, j = min(start - len_iter, -1), None + + for index, item in list(cache)[i:j:step]: + yield item + else: + # Advance to the stop position + if stop is not None: + m = stop + 1 + next(islice(it, m, m), None) + + # stop is positive, so if start is negative they are not comparable + # and we need the rest of the items. + if start < 0: + i = start + n = None + # stop is None and start is positive, so we just need items up to + # the start index. + elif stop is None: + i = None + n = start + 1 + # Both stop and start are positive, so they are comparable. + else: + i = None + n = start - stop + if n <= 0: + return + + cache = list(islice(it, n)) + + yield from cache[i::step] + + +def always_reversible(iterable): + """An extension of :func:`reversed` that supports all iterables, not + just those which implement the ``Reversible`` or ``Sequence`` protocols. + + >>> print(*always_reversible(x for x in range(3))) + 2 1 0 + + If the iterable is already reversible, this function returns the + result of :func:`reversed()`. 
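+
+ >>> print(*always_reversible([1, 2, 3]))
+ 3 2 1
+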
If the iterable is not reversible, + this function will cache the remaining items in the iterable and + yield them in reverse order, which may require significant storage. + """ + try: + return reversed(iterable) + except TypeError: + return reversed(list(iterable)) + + +def consecutive_groups(iterable, ordering=lambda x: x): + """Yield groups of consecutive items using :func:`itertools.groupby`. + The *ordering* function determines whether two items are adjacent by + returning their position. + + By default, the ordering function is the identity function. This is + suitable for finding runs of numbers: + + >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40] + >>> for group in consecutive_groups(iterable): + ... print(list(group)) + [1] + [10, 11, 12] + [20] + [30, 31, 32, 33] + [40] + + For finding runs of adjacent letters, try using the :meth:`index` method + of a string of letters: + + >>> from string import ascii_lowercase + >>> iterable = 'abcdfgilmnop' + >>> ordering = ascii_lowercase.index + >>> for group in consecutive_groups(iterable, ordering): + ... print(list(group)) + ['a', 'b', 'c', 'd'] + ['f', 'g'] + ['i'] + ['l', 'm', 'n', 'o', 'p'] + + Each group of consecutive items is an iterator that shares it source with + *iterable*. When an an output group is advanced, the previous group is + no longer available unless its elements are copied (e.g., into a ``list``). + + >>> iterable = [1, 2, 11, 12, 21, 22] + >>> saved_groups = [] + >>> for group in consecutive_groups(iterable): + ... saved_groups.append(list(group)) # Copy group elements + >>> saved_groups + [[1, 2], [11, 12], [21, 22]] + + """ + for k, g in groupby( + enumerate(iterable), key=lambda x: x[0] - ordering(x[1]) + ): + yield map(itemgetter(1), g) + + +def difference(iterable, func=sub, *, initial=None): + """This function is the inverse of :func:`itertools.accumulate`. By default + it will compute the first difference of *iterable* using + :func:`operator.sub`: + + >>> from itertools import accumulate + >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10 + >>> list(difference(iterable)) + [0, 1, 2, 3, 4] + + *func* defaults to :func:`operator.sub`, but other functions can be + specified. They will be applied as follows:: + + A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ... + + For example, to do progressive division: + + >>> iterable = [1, 2, 6, 24, 120] + >>> func = lambda x, y: x // y + >>> list(difference(iterable, func)) + [1, 2, 3, 4, 5] + + If the *initial* keyword is set, the first element will be skipped when + computing successive differences. + + >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10) + >>> list(difference(it, initial=10)) + [1, 2, 3] + + """ + a, b = tee(iterable) + try: + first = [next(b)] + except StopIteration: + return iter([]) + + if initial is not None: + first = [] + + return chain(first, starmap(func, zip(b, a))) + + +class SequenceView(Sequence): + """Return a read-only view of the sequence object *target*. + + :class:`SequenceView` objects are analogous to Python's built-in + "dictionary view" types. They provide a dynamic view of a sequence's items, + meaning that when the sequence updates, so does the view. + + >>> seq = ['0', '1', '2'] + >>> view = SequenceView(seq) + >>> view + SequenceView(['0', '1', '2']) + >>> seq.append('3') + >>> view + SequenceView(['0', '1', '2', '3']) + + Sequence views support indexing, slicing, and length queries. 
They act + like the underlying sequence, except they don't allow assignment: + + >>> view[1] + '1' + >>> view[1:-1] + ['1', '2'] + >>> len(view) + 4 + + Sequence views are useful as an alternative to copying, as they don't + require (much) extra storage. + + """ + + def __init__(self, target): + if not isinstance(target, Sequence): + raise TypeError + self._target = target + + def __getitem__(self, index): + return self._target[index] + + def __len__(self): + return len(self._target) + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, repr(self._target)) + + +class seekable: + """Wrap an iterator to allow for seeking backward and forward. This + progressively caches the items in the source iterable so they can be + re-visited. + + Call :meth:`seek` with an index to seek to that position in the source + iterable. + + To "reset" an iterator, seek to ``0``: + + >>> from itertools import count + >>> it = seekable((str(n) for n in count())) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> it.seek(0) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> next(it) + '3' + + You can also seek forward: + + >>> it = seekable((str(n) for n in range(20))) + >>> it.seek(10) + >>> next(it) + '10' + >>> it.seek(20) # Seeking past the end of the source isn't a problem + >>> list(it) + [] + >>> it.seek(0) # Resetting works even after hitting the end + >>> next(it), next(it), next(it) + ('0', '1', '2') + + Call :meth:`peek` to look ahead one item without advancing the iterator: + + >>> it = seekable('1234') + >>> it.peek() + '1' + >>> list(it) + ['1', '2', '3', '4'] + >>> it.peek(default='empty') + 'empty' + + Before the iterator is at its end, calling :func:`bool` on it will return + ``True``. After it will return ``False``: + + >>> it = seekable('5678') + >>> bool(it) + True + >>> list(it) + ['5', '6', '7', '8'] + >>> bool(it) + False + + You may view the contents of the cache with the :meth:`elements` method. + That returns a :class:`SequenceView`, a view that updates automatically: + + >>> it = seekable((str(n) for n in range(10))) + >>> next(it), next(it), next(it) + ('0', '1', '2') + >>> elements = it.elements() + >>> elements + SequenceView(['0', '1', '2']) + >>> next(it) + '3' + >>> elements + SequenceView(['0', '1', '2', '3']) + + By default, the cache grows as the source iterable progresses, so beware of + wrapping very large or infinite iterables. Supply *maxlen* to limit the + size of the cache (this of course limits how far back you can seek). 
+ + >>> from itertools import count + >>> it = seekable((str(n) for n in count()), maxlen=2) + >>> next(it), next(it), next(it), next(it) + ('0', '1', '2', '3') + >>> list(it.elements()) + ['2', '3'] + >>> it.seek(0) + >>> next(it), next(it), next(it), next(it) + ('2', '3', '4', '5') + >>> next(it) + '6' + + """ + + def __init__(self, iterable, maxlen=None): + self._source = iter(iterable) + if maxlen is None: + self._cache = [] + else: + self._cache = deque([], maxlen) + self._index = None + + def __iter__(self): + return self + + def __next__(self): + if self._index is not None: + try: + item = self._cache[self._index] + except IndexError: + self._index = None + else: + self._index += 1 + return item + + item = next(self._source) + self._cache.append(item) + return item + + def __bool__(self): + try: + self.peek() + except StopIteration: + return False + return True + + def peek(self, default=_marker): + try: + peeked = next(self) + except StopIteration: + if default is _marker: + raise + return default + if self._index is None: + self._index = len(self._cache) + self._index -= 1 + return peeked + + def elements(self): + return SequenceView(self._cache) + + def seek(self, index): + self._index = index + remainder = index - len(self._cache) + if remainder > 0: + consume(self, remainder) + + +class run_length: + """ + :func:`run_length.encode` compresses an iterable with run-length encoding. + It yields groups of repeated items with the count of how many times they + were repeated: + + >>> uncompressed = 'abbcccdddd' + >>> list(run_length.encode(uncompressed)) + [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + + :func:`run_length.decode` decompresses an iterable that was previously + compressed with run-length encoding. It yields the items of the + decompressed iterable: + + >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] + >>> list(run_length.decode(compressed)) + ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd'] + + """ + + @staticmethod + def encode(iterable): + return ((k, ilen(g)) for k, g in groupby(iterable)) + + @staticmethod + def decode(iterable): + return chain.from_iterable(repeat(k, n) for k, n in iterable) + + +def exactly_n(iterable, n, predicate=bool): + """Return ``True`` if exactly ``n`` items in the iterable are ``True`` + according to the *predicate* function. + + >>> exactly_n([True, True, False], 2) + True + >>> exactly_n([True, True, False], 1) + False + >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3) + True + + The iterable will be advanced until ``n + 1`` truthy items are encountered, + so avoid calling it on infinite iterables. + + """ + return len(take(n + 1, filter(predicate, iterable))) == n + + +def circular_shifts(iterable): + """Return a list of circular shifts of *iterable*. + + >>> circular_shifts(range(4)) + [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] + """ + lst = list(iterable) + return take(len(lst), windowed(cycle(lst), len(lst))) + + +def make_decorator(wrapping_func, result_index=0): + """Return a decorator version of *wrapping_func*, which is a function that + modifies an iterable. *result_index* is the position in that function's + signature where the iterable goes. + + This lets you use itertools on the "production end," i.e. at function + definition. This can augment what the function returns without changing the + function's code. + + For example, to produce a decorator version of :func:`chunked`: + + >>> from more_itertools import chunked + >>> chunker = make_decorator(chunked, result_index=0) + >>> @chunker(3) + ... 
def iter_range(n): + ... return iter(range(n)) + ... + >>> list(iter_range(9)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8]] + + To only allow truthy items to be returned: + + >>> truth_serum = make_decorator(filter, result_index=1) + >>> @truth_serum(bool) + ... def boolean_test(): + ... return [0, 1, '', ' ', False, True] + ... + >>> list(boolean_test()) + [1, ' ', True] + + The :func:`peekable` and :func:`seekable` wrappers make for practical + decorators: + + >>> from more_itertools import peekable + >>> peekable_function = make_decorator(peekable) + >>> @peekable_function() + ... def str_range(*args): + ... return (str(x) for x in range(*args)) + ... + >>> it = str_range(1, 20, 2) + >>> next(it), next(it), next(it) + ('1', '3', '5') + >>> it.peek() + '7' + >>> next(it) + '7' + + """ + # See https://sites.google.com/site/bbayles/index/decorator_factory for + # notes on how this works. + def decorator(*wrapping_args, **wrapping_kwargs): + def outer_wrapper(f): + def inner_wrapper(*args, **kwargs): + result = f(*args, **kwargs) + wrapping_args_ = list(wrapping_args) + wrapping_args_.insert(result_index, result) + return wrapping_func(*wrapping_args_, **wrapping_kwargs) + + return inner_wrapper + + return outer_wrapper + + return decorator + + +def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None): + """Return a dictionary that maps the items in *iterable* to categories + defined by *keyfunc*, transforms them with *valuefunc*, and + then summarizes them by category with *reducefunc*. + + *valuefunc* defaults to the identity function if it is unspecified. + If *reducefunc* is unspecified, no summarization takes place: + + >>> keyfunc = lambda x: x.upper() + >>> result = map_reduce('abbccc', keyfunc) + >>> sorted(result.items()) + [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])] + + Specifying *valuefunc* transforms the categorized items: + + >>> keyfunc = lambda x: x.upper() + >>> valuefunc = lambda x: 1 + >>> result = map_reduce('abbccc', keyfunc, valuefunc) + >>> sorted(result.items()) + [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])] + + Specifying *reducefunc* summarizes the categorized items: + + >>> keyfunc = lambda x: x.upper() + >>> valuefunc = lambda x: 1 + >>> reducefunc = sum + >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc) + >>> sorted(result.items()) + [('A', 1), ('B', 2), ('C', 3)] + + You may want to filter the input iterable before applying the map/reduce + procedure: + + >>> all_items = range(30) + >>> items = [x for x in all_items if 10 <= x <= 20] # Filter + >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1 + >>> categories = map_reduce(items, keyfunc=keyfunc) + >>> sorted(categories.items()) + [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])] + >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum) + >>> sorted(summaries.items()) + [(0, 90), (1, 75)] + + Note that all items in the iterable are gathered into a list before the + summarization step, which may require significant storage. + + The returned object is a :obj:`collections.defaultdict` with the + ``default_factory`` set to ``None``, such that it behaves like a normal + dictionary. 
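+
+ For example, a missing key raises ``KeyError`` instead of being created:
+
+ >>> result = map_reduce('abbccc', keyfunc=lambda x: x.upper())
+ >>> result['Z']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Z'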
+ + """ + valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc + + ret = defaultdict(list) + for item in iterable: + key = keyfunc(item) + value = valuefunc(item) + ret[key].append(value) + + if reducefunc is not None: + for key, value_list in ret.items(): + ret[key] = reducefunc(value_list) + + ret.default_factory = None + return ret + + +def rlocate(iterable, pred=bool, window_size=None): + """Yield the index of each item in *iterable* for which *pred* returns + ``True``, starting from the right and moving left. + + *pred* defaults to :func:`bool`, which will select truthy items: + + >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4 + [4, 2, 1] + + Set *pred* to a custom function to, e.g., find the indexes for a particular + item: + + >>> iterable = iter('abcb') + >>> pred = lambda x: x == 'b' + >>> list(rlocate(iterable, pred)) + [3, 1] + + If *window_size* is given, then the *pred* function will be called with + that many items. This enables searching for sub-sequences: + + >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] + >>> pred = lambda *args: args == (1, 2, 3) + >>> list(rlocate(iterable, pred=pred, window_size=3)) + [9, 5, 1] + + Beware, this function won't return anything for infinite iterables. + If *iterable* is reversible, ``rlocate`` will reverse it and search from + the right. Otherwise, it will search from the left and return the results + in reverse order. + + See :func:`locate` to for other example applications. + + """ + if window_size is None: + try: + len_iter = len(iterable) + return (len_iter - i - 1 for i in locate(reversed(iterable), pred)) + except TypeError: + pass + + return reversed(list(locate(iterable, pred, window_size))) + + +def replace(iterable, pred, substitutes, count=None, window_size=1): + """Yield the items from *iterable*, replacing the items for which *pred* + returns ``True`` with the items from the iterable *substitutes*. + + >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1] + >>> pred = lambda x: x == 0 + >>> substitutes = (2, 3) + >>> list(replace(iterable, pred, substitutes)) + [1, 1, 2, 3, 1, 1, 2, 3, 1, 1] + + If *count* is given, the number of replacements will be limited: + + >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0] + >>> pred = lambda x: x == 0 + >>> substitutes = [None] + >>> list(replace(iterable, pred, substitutes, count=2)) + [1, 1, None, 1, 1, None, 1, 1, 0] + + Use *window_size* to control the number of items passed as arguments to + *pred*. This allows for locating and replacing subsequences. + + >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5] + >>> window_size = 3 + >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred + >>> substitutes = [3, 4] # Splice in these items + >>> list(replace(iterable, pred, substitutes, window_size=window_size)) + [3, 4, 5, 3, 4, 5] + + """ + if window_size < 1: + raise ValueError('window_size must be at least 1') + + # Save the substitutes iterable, since it's used more than once + substitutes = tuple(substitutes) + + # Add padding such that the number of windows matches the length of the + # iterable + it = chain(iterable, [_marker] * (window_size - 1)) + windows = windowed(it, window_size) + + n = 0 + for w in windows: + # If the current window matches our predicate (and we haven't hit + # our maximum number of replacements), splice in the substitutes + # and then consume the following windows that overlap with this one. + # For example, if the iterable is (0, 1, 2, 3, 4...) + # and the window size is 2, we have (0, 1), (1, 2), (2, 3)... 
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2) + if pred(*w): + if (count is None) or (n < count): + n += 1 + yield from substitutes + consume(windows, window_size - 1) + continue + + # If there was no match (or we've reached the replacement limit), + # yield the first item from the window. + if w and (w[0] is not _marker): + yield w[0] + + +def partitions(iterable): + """Yield all possible order-preserving partitions of *iterable*. + + >>> iterable = 'abc' + >>> for part in partitions(iterable): + ... print([''.join(p) for p in part]) + ['abc'] + ['a', 'bc'] + ['ab', 'c'] + ['a', 'b', 'c'] + + This is unrelated to :func:`partition`. + + """ + sequence = list(iterable) + n = len(sequence) + for i in powerset(range(1, n)): + yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))] + + +def set_partitions(iterable, k=None): + """ + Yield the set partitions of *iterable* into *k* parts. Set partitions are + not order-preserving. + + >>> iterable = 'abc' + >>> for part in set_partitions(iterable, 2): + ... print([''.join(p) for p in part]) + ['a', 'bc'] + ['ab', 'c'] + ['b', 'ac'] + + + If *k* is not given, every set partition is generated. + + >>> iterable = 'abc' + >>> for part in set_partitions(iterable): + ... print([''.join(p) for p in part]) + ['abc'] + ['a', 'bc'] + ['ab', 'c'] + ['b', 'ac'] + ['a', 'b', 'c'] + + """ + L = list(iterable) + n = len(L) + if k is not None: + if k < 1: + raise ValueError( + "Can't partition in a negative or zero number of groups" + ) + elif k > n: + return + + def set_partitions_helper(L, k): + n = len(L) + if k == 1: + yield [L] + elif n == k: + yield [[s] for s in L] + else: + e, *M = L + for p in set_partitions_helper(M, k - 1): + yield [[e], *p] + for p in set_partitions_helper(M, k): + for i in range(len(p)): + yield p[:i] + [[e] + p[i]] + p[i + 1 :] + + if k is None: + for k in range(1, n + 1): + yield from set_partitions_helper(L, k) + else: + yield from set_partitions_helper(L, k) + + +class time_limited: + """ + Yield items from *iterable* until *limit_seconds* have passed. + If the time limit expires before all items have been yielded, the + ``timed_out`` parameter will be set to ``True``. + + >>> from time import sleep + >>> def generator(): + ... yield 1 + ... yield 2 + ... sleep(0.2) + ... yield 3 + >>> iterable = time_limited(0.1, generator()) + >>> list(iterable) + [1, 2] + >>> iterable.timed_out + True + + Note that the time is checked before each item is yielded, and iteration + stops if the time elapsed is greater than *limit_seconds*. If your time + limit is 1 second, but it takes 2 seconds to generate the first item from + the iterable, the function will run for 2 seconds and not yield anything. + + """ + + def __init__(self, limit_seconds, iterable): + if limit_seconds < 0: + raise ValueError('limit_seconds must be positive') + self.limit_seconds = limit_seconds + self._iterable = iter(iterable) + self._start_time = monotonic() + self.timed_out = False + + def __iter__(self): + return self + + def __next__(self): + item = next(self._iterable) + if monotonic() - self._start_time > self.limit_seconds: + self.timed_out = True + raise StopIteration + + return item + + +def only(iterable, default=None, too_long=None): + """If *iterable* has only one item, return it. + If it has zero items, return *default*. + If it has more than one item, raise the exception given by *too_long*, + which is ``ValueError`` by default. 
+ + >>> only([], default='missing') + 'missing' + >>> only([1]) + 1 + >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValueError: Expected exactly one item in iterable, but got 1, 2, + and perhaps more.' + >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError + + Note that :func:`only` attempts to advance *iterable* twice to ensure there + is only one item. See :func:`spy` or :func:`peekable` to check + iterable contents less destructively. + """ + it = iter(iterable) + first_value = next(it, default) + + try: + second_value = next(it) + except StopIteration: + pass + else: + msg = ( + 'Expected exactly one item in iterable, but got {!r}, {!r}, ' + 'and perhaps more.'.format(first_value, second_value) + ) + raise too_long or ValueError(msg) + + return first_value + + +def ichunked(iterable, n): + """Break *iterable* into sub-iterables with *n* elements each. + :func:`ichunked` is like :func:`chunked`, but it yields iterables + instead of lists. + + If the sub-iterables are read in order, the elements of *iterable* + won't be stored in memory. + If they are read out of order, :func:`itertools.tee` is used to cache + elements as necessary. + + >>> from itertools import count + >>> all_chunks = ichunked(count(), 4) + >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks) + >>> list(c_2) # c_1's elements have been cached; c_3's haven't been + [4, 5, 6, 7] + >>> list(c_1) + [0, 1, 2, 3] + >>> list(c_3) + [8, 9, 10, 11] + + """ + source = iter(iterable) + + while True: + # Check to see whether we're at the end of the source iterable + item = next(source, _marker) + if item is _marker: + return + + # Clone the source and yield an n-length slice + source, it = tee(chain([item], source)) + yield islice(it, n) + + # Advance the source iterable + consume(source, n) + + +def distinct_combinations(iterable, r): + """Yield the distinct combinations of *r* items taken from *iterable*. + + >>> list(distinct_combinations([0, 0, 1], 2)) + [(0, 0), (0, 1)] + + Equivalent to ``set(combinations(iterable))``, except duplicates are not + generated and thrown away. For larger input sequences this is much more + efficient. + + """ + if r < 0: + raise ValueError('r must be non-negative') + elif r == 0: + yield () + return + pool = tuple(iterable) + generators = [unique_everseen(enumerate(pool), key=itemgetter(1))] + current_combo = [None] * r + level = 0 + while generators: + try: + cur_idx, p = next(generators[-1]) + except StopIteration: + generators.pop() + level -= 1 + continue + current_combo[level] = p + if level + 1 == r: + yield tuple(current_combo) + else: + generators.append( + unique_everseen( + enumerate(pool[cur_idx + 1 :], cur_idx + 1), + key=itemgetter(1), + ) + ) + level += 1 + + +def filter_except(validator, iterable, *exceptions): + """Yield the items from *iterable* for which the *validator* function does + not raise one of the specified *exceptions*. + + *validator* is called for each item in *iterable*. + It should be a function that accepts one argument and raises an exception + if that item is not valid. + + >>> iterable = ['1', '2', 'three', '4', None] + >>> list(filter_except(int, iterable, ValueError, TypeError)) + ['1', '2', '4'] + + If an exception other than one given by *exceptions* is raised by + *validator*, it is raised like normal. 
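+
+ For instance, the ``TypeError`` from ``int(None)`` is not caught here
+ because only ``ValueError`` is listed:
+
+ >>> list(filter_except(int, ['1', None], ValueError)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ TypeError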
+ """ + for item in iterable: + try: + validator(item) + except exceptions: + pass + else: + yield item + + +def map_except(function, iterable, *exceptions): + """Transform each item from *iterable* with *function* and yield the + result, unless *function* raises one of the specified *exceptions*. + + *function* is called to transform each item in *iterable*. + It should accept one argument. + + >>> iterable = ['1', '2', 'three', '4', None] + >>> list(map_except(int, iterable, ValueError, TypeError)) + [1, 2, 4] + + If an exception other than one given by *exceptions* is raised by + *function*, it is raised like normal. + """ + for item in iterable: + try: + yield function(item) + except exceptions: + pass + + +def map_if(iterable, pred, func, func_else=lambda x: x): + """Evaluate each item from *iterable* using *pred*. If the result is + equivalent to ``True``, transform the item with *func* and yield it. + Otherwise, transform the item with *func_else* and yield it. + + *pred*, *func*, and *func_else* should each be functions that accept + one argument. By default, *func_else* is the identity function. + + >>> from math import sqrt + >>> iterable = list(range(-5, 5)) + >>> iterable + [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4] + >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig')) + [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig'] + >>> list(map_if(iterable, lambda x: x >= 0, + ... lambda x: f'{sqrt(x):.2f}', lambda x: None)) + [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00'] + """ + for item in iterable: + yield func(item) if pred(item) else func_else(item) + + +def _sample_unweighted(iterable, k): + # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li: + # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))". + + # Fill up the reservoir (collection of samples) with the first `k` samples + reservoir = take(k, iterable) + + # Generate random number that's the largest in a sample of k U(0,1) numbers + # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic + W = exp(log(random()) / k) + + # The number of elements to skip before changing the reservoir is a random + # number with a geometric distribution. Sample it using random() and logs. + next_index = k + floor(log(random()) / log(1 - W)) + + for index, element in enumerate(iterable, k): + + if index == next_index: + reservoir[randrange(k)] = element + # The new W is the largest in a sample of k U(0, `old_W`) numbers + W *= exp(log(random()) / k) + next_index += floor(log(random()) / log(1 - W)) + 1 + + return reservoir + + +def _sample_weighted(iterable, k, weights): + # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. : + # "Weighted random sampling with a reservoir". + + # Log-transform for numerical stability for weights that are small/large + weight_keys = (log(random()) / weight for weight in weights) + + # Fill up the reservoir (collection of samples) with the first `k` + # weight-keys and elements, then heapify the list. + reservoir = take(k, zip(weight_keys, iterable)) + heapify(reservoir) + + # The number of jumps before changing the reservoir is a random variable + # with an exponential distribution. Sample it using random() and logs. + smallest_weight_key, _ = reservoir[0] + weights_to_skip = log(random()) / smallest_weight_key + + for weight, element in zip(weights, iterable): + if weight >= weights_to_skip: + # The notation here is consistent with the paper, but we store + # the weight-keys in log-space for better numerical stability. 
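+ # The replacement key r_2 is drawn from U(t_w, 1) and stored back in
+ # log-space as log(r_2) / weight before being pushed onto the heap.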
+ smallest_weight_key, _ = reservoir[0] + t_w = exp(weight * smallest_weight_key) + r_2 = uniform(t_w, 1) # generate U(t_w, 1) + weight_key = log(r_2) / weight + heapreplace(reservoir, (weight_key, element)) + smallest_weight_key, _ = reservoir[0] + weights_to_skip = log(random()) / smallest_weight_key + else: + weights_to_skip -= weight + + # Equivalent to [element for weight_key, element in sorted(reservoir)] + return [heappop(reservoir)[1] for _ in range(k)] + + +def sample(iterable, k, weights=None): + """Return a *k*-length list of elements chosen (without replacement) + from the *iterable*. Like :func:`random.sample`, but works on iterables + of unknown length. + + >>> iterable = range(100) + >>> sample(iterable, 5) # doctest: +SKIP + [81, 60, 96, 16, 4] + + An iterable with *weights* may also be given: + + >>> iterable = range(100) + >>> weights = (i * i + 1 for i in range(100)) + >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP + [79, 67, 74, 66, 78] + + The algorithm can also be used to generate weighted random permutations. + The relative weight of each item determines the probability that it + appears late in the permutation. + + >>> data = "abcdefgh" + >>> weights = range(1, len(data) + 1) + >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP + ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f'] + """ + if k == 0: + return [] + + iterable = iter(iterable) + if weights is None: + return _sample_unweighted(iterable, k) + else: + weights = iter(weights) + return _sample_weighted(iterable, k, weights) + + +def is_sorted(iterable, key=None, reverse=False, strict=False): + """Returns ``True`` if the items of iterable are in sorted order, and + ``False`` otherwise. *key* and *reverse* have the same meaning that they do + in the built-in :func:`sorted` function. + + >>> is_sorted(['1', '2', '3', '4', '5'], key=int) + True + >>> is_sorted([5, 4, 3, 1, 2], reverse=True) + False + + If *strict*, tests for strict sorting, that is, returns ``False`` if equal + elements are found: + + >>> is_sorted([1, 2, 2]) + True + >>> is_sorted([1, 2, 2], strict=True) + False + + The function returns ``False`` after encountering the first out-of-order + item. If there are no out-of-order items, the iterable is exhausted. + """ + + compare = (le if reverse else ge) if strict else (lt if reverse else gt) + it = iterable if key is None else map(key, iterable) + return not any(starmap(compare, pairwise(it))) + + +class AbortThread(BaseException): + pass + + +class callback_iter: + """Convert a function that uses callbacks to an iterator. + + Let *func* be a function that takes a `callback` keyword argument. + For example: + + >>> def func(callback=None): + ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]: + ... if callback: + ... callback(i, c) + ... return 4 + + + Use ``with callback_iter(func)`` to get an iterator over the parameters + that are delivered to the callback. + + >>> with callback_iter(func) as it: + ... for args, kwargs in it: + ... print(args) + (1, 'a') + (2, 'b') + (3, 'c') + + The function will be called in a background thread. The ``done`` property + indicates whether it has completed execution. + + >>> it.done + True + + If it completes successfully, its return value will be available + in the ``result`` property. + + >>> it.result + 4 + + Notes: + + * If the function uses some keyword argument besides ``callback``, supply + *callback_kwd*. + * If it finished executing, but raised an exception, accessing the + ``result`` property will raise the same exception. 
+ * If it hasn't finished executing, accessing the ``result`` + property from within the ``with`` block will raise ``RuntimeError``. + * If it hasn't finished executing, accessing the ``result`` property from + outside the ``with`` block will raise a + ``more_itertools.AbortThread`` exception. + * Provide *wait_seconds* to adjust how frequently the it is polled for + output. + + """ + + def __init__(self, func, callback_kwd='callback', wait_seconds=0.1): + self._func = func + self._callback_kwd = callback_kwd + self._aborted = False + self._future = None + self._wait_seconds = wait_seconds + self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) + self._iterator = self._reader() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._aborted = True + self._executor.shutdown() + + def __iter__(self): + return self + + def __next__(self): + return next(self._iterator) + + @property + def done(self): + if self._future is None: + return False + return self._future.done() + + @property + def result(self): + if not self.done: + raise RuntimeError('Function has not yet completed') + + return self._future.result() + + def _reader(self): + q = Queue() + + def callback(*args, **kwargs): + if self._aborted: + raise AbortThread('canceled by user') + + q.put((args, kwargs)) + + self._future = self._executor.submit( + self._func, **{self._callback_kwd: callback} + ) + + while True: + try: + item = q.get(timeout=self._wait_seconds) + except Empty: + pass + else: + q.task_done() + yield item + + if self._future.done(): + break + + remaining = [] + while True: + try: + item = q.get_nowait() + except Empty: + break + else: + q.task_done() + remaining.append(item) + q.join() + yield from remaining + + +def windowed_complete(iterable, n): + """ + Yield ``(beginning, middle, end)`` tuples, where: + + * Each ``middle`` has *n* items from *iterable* + * Each ``beginning`` has the items before the ones in ``middle`` + * Each ``end`` has the items after the ones in ``middle`` + + >>> iterable = range(7) + >>> n = 3 + >>> for beginning, middle, end in windowed_complete(iterable, n): + ... print(beginning, middle, end) + () (0, 1, 2) (3, 4, 5, 6) + (0,) (1, 2, 3) (4, 5, 6) + (0, 1) (2, 3, 4) (5, 6) + (0, 1, 2) (3, 4, 5) (6,) + (0, 1, 2, 3) (4, 5, 6) () + + Note that *n* must be at least 0 and most equal to the length of + *iterable*. + + This function will exhaust the iterable and may require significant + storage. + """ + if n < 0: + raise ValueError('n must be >= 0') + + seq = tuple(iterable) + size = len(seq) + + if n > size: + raise ValueError('n must be <= len(seq)') + + for i in range(size - n + 1): + beginning = seq[:i] + middle = seq[i : i + n] + end = seq[i + n :] + yield beginning, middle, end + + +def all_unique(iterable, key=None): + """ + Returns ``True`` if all the elements of *iterable* are unique (no two + elements are equal). + + >>> all_unique('ABCB') + False + + If a *key* function is specified, it will be used to make comparisons. + + >>> all_unique('ABCb') + True + >>> all_unique('ABCb', str.lower) + False + + The function returns as soon as the first non-unique element is + encountered. Iterables with a mix of hashable and unhashable items can + be used, but the function will be slower for unhashable items. 
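+
+ For example, unhashable items such as lists are checked with a slower
+ list-based scan:
+
+ >>> all_unique([[1], [2], [1]])
+ False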
+ """ + seenset = set() + seenset_add = seenset.add + seenlist = [] + seenlist_add = seenlist.append + for element in map(key, iterable) if key else iterable: + try: + if element in seenset: + return False + seenset_add(element) + except TypeError: + if element in seenlist: + return False + seenlist_add(element) + return True + + +def nth_product(index, *args): + """Equivalent to ``list(product(*args))[index]``. + + The products of *args* can be ordered lexicographically. + :func:`nth_product` computes the product at sort position *index* without + computing the previous products. + + >>> nth_product(8, range(2), range(2), range(2), range(2)) + (1, 0, 0, 0) + + ``IndexError`` will be raised if the given *index* is invalid. + """ + pools = list(map(tuple, reversed(args))) + ns = list(map(len, pools)) + + c = reduce(mul, ns) + + if index < 0: + index += c + + if not 0 <= index < c: + raise IndexError + + result = [] + for pool, n in zip(pools, ns): + result.append(pool[index % n]) + index //= n + + return tuple(reversed(result)) + + +def nth_permutation(iterable, r, index): + """Equivalent to ``list(permutations(iterable, r))[index]``` + + The subsequences of *iterable* that are of length *r* where order is + important can be ordered lexicographically. :func:`nth_permutation` + computes the subsequence at sort position *index* directly, without + computing the previous subsequences. + + >>> nth_permutation('ghijk', 2, 5) + ('h', 'i') + + ``ValueError`` will be raised If *r* is negative or greater than the length + of *iterable*. + ``IndexError`` will be raised if the given *index* is invalid. + """ + pool = list(iterable) + n = len(pool) + + if r is None or r == n: + r, c = n, factorial(n) + elif not 0 <= r < n: + raise ValueError + else: + c = factorial(n) // factorial(n - r) + + if index < 0: + index += c + + if not 0 <= index < c: + raise IndexError + + if c == 0: + return tuple() + + result = [0] * r + q = index * factorial(n) // c if r < n else index + for d in range(1, n + 1): + q, i = divmod(q, d) + if 0 <= n - d < r: + result[n - d] = i + if q == 0: + break + + return tuple(map(pool.pop, result)) + + +def value_chain(*args): + """Yield all arguments passed to the function in the same order in which + they were passed. If an argument itself is iterable then iterate over its + values. + + >>> list(value_chain(1, 2, 3, [4, 5, 6])) + [1, 2, 3, 4, 5, 6] + + Binary and text strings are not considered iterable and are emitted + as-is: + + >>> list(value_chain('12', '34', ['56', '78'])) + ['12', '34', '56', '78'] + + + Multiple levels of nesting are not flattened. + + """ + for value in args: + if isinstance(value, (str, bytes)): + yield value + continue + try: + yield from value + except TypeError: + yield value + + +def product_index(element, *args): + """Equivalent to ``list(product(*args)).index(element)`` + + The products of *args* can be ordered lexicographically. + :func:`product_index` computes the first index of *element* without + computing the previous products. + + >>> product_index([8, 2], range(10), range(5)) + 42 + + ``ValueError`` will be raised if the given *element* isn't in the product + of *args*. 
+ """ + index = 0 + + for x, pool in zip_longest(element, args, fillvalue=_marker): + if x is _marker or pool is _marker: + raise ValueError('element is not a product of args') + + pool = tuple(pool) + index = index * len(pool) + pool.index(x) + + return index + + +def combination_index(element, iterable): + """Equivalent to ``list(combinations(iterable, r)).index(element)`` + + The subsequences of *iterable* that are of length *r* can be ordered + lexicographically. :func:`combination_index` computes the index of the + first *element*, without computing the previous combinations. + + >>> combination_index('adf', 'abcdefg') + 10 + + ``ValueError`` will be raised if the given *element* isn't one of the + combinations of *iterable*. + """ + element = enumerate(element) + k, y = next(element, (None, None)) + if k is None: + return 0 + + indexes = [] + pool = enumerate(iterable) + for n, x in pool: + if x == y: + indexes.append(n) + tmp, y = next(element, (None, None)) + if tmp is None: + break + else: + k = tmp + else: + raise ValueError('element is not a combination of iterable') + + n, _ = last(pool, default=(n, None)) + + # Python versiosn below 3.8 don't have math.comb + index = 1 + for i, j in enumerate(reversed(indexes), start=1): + j = n - j + if i <= j: + index += factorial(j) // (factorial(i) * factorial(j - i)) + + return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index + + +def permutation_index(element, iterable): + """Equivalent to ``list(permutations(iterable, r)).index(element)``` + + The subsequences of *iterable* that are of length *r* where order is + important can be ordered lexicographically. :func:`permutation_index` + computes the index of the first *element* directly, without computing + the previous permutations. + + >>> permutation_index([1, 3, 2], range(5)) + 19 + + ``ValueError`` will be raised if the given *element* isn't one of the + permutations of *iterable*. + """ + index = 0 + pool = list(iterable) + for i, x in zip(range(len(pool), -1, -1), element): + r = pool.index(x) + index = index * i + r + del pool[r] + + return index + + +class countable: + """Wrap *iterable* and keep a count of how many items have been consumed. + + The ``items_seen`` attribute starts at ``0`` and increments as the iterable + is consumed: + + >>> iterable = map(str, range(10)) + >>> it = countable(iterable) + >>> it.items_seen + 0 + >>> next(it), next(it) + ('0', '1') + >>> list(it) + ['2', '3', '4', '5', '6', '7', '8', '9'] + >>> it.items_seen + 10 + """ + + def __init__(self, iterable): + self._it = iter(iterable) + self.items_seen = 0 + + def __iter__(self): + return self + + def __next__(self): + item = next(self._it) + self.items_seen += 1 + + return item + + +def chunked_even(iterable, n): + """Break *iterable* into lists of approximately length *n*. + Items are distributed such the lengths of the lists differ by at most + 1 item. 
+ + >>> iterable = [1, 2, 3, 4, 5, 6, 7] + >>> n = 3 + >>> list(chunked_even(iterable, n)) # List lengths: 3, 2, 2 + [[1, 2, 3], [4, 5], [6, 7]] + >>> list(chunked(iterable, n)) # List lengths: 3, 3, 1 + [[1, 2, 3], [4, 5, 6], [7]] + + """ + + len_method = getattr(iterable, '__len__', None) + + if len_method is None: + return _chunked_even_online(iterable, n) + else: + return _chunked_even_finite(iterable, len_method(), n) + + +def _chunked_even_online(iterable, n): + buffer = [] + maxbuf = n + (n - 2) * (n - 1) + for x in iterable: + buffer.append(x) + if len(buffer) == maxbuf: + yield buffer[:n] + buffer = buffer[n:] + yield from _chunked_even_finite(buffer, len(buffer), n) + + +def _chunked_even_finite(iterable, N, n): + if N < 1: + return + + # Lists are either size `full_size <= n` or `partial_size = full_size - 1` + q, r = divmod(N, n) + num_lists = q + (1 if r > 0 else 0) + q, r = divmod(N, num_lists) + full_size = q + (1 if r > 0 else 0) + partial_size = full_size - 1 + num_full = N - partial_size * num_lists + num_partial = num_lists - num_full + + buffer = [] + iterator = iter(iterable) + + # Yield num_full lists of full_size + for x in iterator: + buffer.append(x) + if len(buffer) == full_size: + yield buffer + buffer = [] + num_full -= 1 + if num_full <= 0: + break + + # Yield num_partial lists of partial_size + for x in iterator: + buffer.append(x) + if len(buffer) == partial_size: + yield buffer + buffer = [] + num_partial -= 1 + + +def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False): + """A version of :func:`zip` that "broadcasts" any scalar + (i.e., non-iterable) items into output tuples. + + >>> iterable_1 = [1, 2, 3] + >>> iterable_2 = ['a', 'b', 'c'] + >>> scalar = '_' + >>> list(zip_broadcast(iterable_1, iterable_2, scalar)) + [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')] + + The *scalar_types* keyword argument determines what types are considered + scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to + treat strings and byte strings as iterable: + + >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None)) + [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')] + + If the *strict* keyword argument is ``True``, then + ``UnequalIterablesError`` will be raised if any of the iterables have + different lengthss. + """ + + def is_scalar(obj): + if scalar_types and isinstance(obj, scalar_types): + return True + try: + iter(obj) + except TypeError: + return True + else: + return False + + size = len(objects) + if not size: + return + + iterables, iterable_positions = [], [] + scalars, scalar_positions = [], [] + for i, obj in enumerate(objects): + if is_scalar(obj): + scalars.append(obj) + scalar_positions.append(i) + else: + iterables.append(iter(obj)) + iterable_positions.append(i) + + if len(scalars) == size: + yield tuple(objects) + return + + zipper = _zip_equal if strict else zip + for item in zipper(*iterables): + new_item = [None] * size + + for i, elem in zip(iterable_positions, item): + new_item[i] = elem + + for i, elem in zip(scalar_positions, scalars): + new_item[i] = elem + + yield tuple(new_item) + + +def unique_in_window(iterable, n, key=None): + """Yield the items from *iterable* that haven't been seen recently. + *n* is the size of the lookback window. 
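One behavioural note worth illustrating beyond the doctests below (a sketch, assuming the standalone ``more_itertools`` distribution is importable): suppressed duplicates do not advance the window, so "recently" means within the last *n* distinct values that were actually yielded, and a value can reappear once *n* other values have been emitted after it:

```python
from more_itertools import unique_in_window

events = ["login", "login", "click", "login", "click", "logout", "login"]

# With n=2, "login" is suppressed while it is one of the last two yielded values;
# after "click" and "logout" are emitted it has left the window and appears again.
print(list(unique_in_window(events, 2)))
# ['login', 'click', 'logout', 'login']
```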
+ + >>> iterable = [0, 1, 0, 2, 3, 0] + >>> n = 3 + >>> list(unique_in_window(iterable, n)) + [0, 1, 2, 3, 0] + + The *key* function, if provided, will be used to determine uniqueness: + + >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower())) + ['a', 'b', 'c', 'd', 'a'] + + The items in *iterable* must be hashable. + + """ + if n <= 0: + raise ValueError('n must be greater than 0') + + window = deque(maxlen=n) + uniques = set() + use_key = key is not None + + for item in iterable: + k = key(item) if use_key else item + if k in uniques: + continue + + if len(uniques) == n: + uniques.discard(window[0]) + + uniques.add(k) + window.append(k) + + yield item + + +def duplicates_everseen(iterable, key=None): + """Yield duplicate elements after their first appearance. + + >>> list(duplicates_everseen('mississippi')) + ['s', 'i', 's', 's', 'i', 'p', 'i'] + >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower)) + ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a'] + + This function is analagous to :func:`unique_everseen` and is subject to + the same performance considerations. + + """ + seen_set = set() + seen_list = [] + use_key = key is not None + + for element in iterable: + k = key(element) if use_key else element + try: + if k not in seen_set: + seen_set.add(k) + else: + yield element + except TypeError: + if k not in seen_list: + seen_list.append(k) + else: + yield element + + +def duplicates_justseen(iterable, key=None): + """Yields serially-duplicate elements after their first appearance. + + >>> list(duplicates_justseen('mississippi')) + ['s', 's', 'p'] + >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower)) + ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a'] + + This function is analagous to :func:`unique_justseen`. + + """ + return flatten( + map( + lambda group_tuple: islice_extended(group_tuple[1])[1:], + groupby(iterable, key), + ) + ) + + +def minmax(iterable_or_value, *others, key=None, default=_marker): + """Returns both the smallest and largest items in an iterable + or the largest of two or more arguments. + + >>> minmax([3, 1, 5]) + (1, 5) + + >>> minmax(4, 2, 6) + (2, 6) + + If a *key* function is provided, it will be used to transform the input + items for comparison. + + >>> minmax([5, 30], key=str) # '30' sorts before '5' + (30, 5) + + If a *default* value is provided, it will be returned if there are no + input items. + + >>> minmax([], default=(0, 0)) + (0, 0) + + Otherwise ``ValueError`` is raised. + + This function is based on the + `recipe <http://code.activestate.com/recipes/577916/>`__ by + Raymond Hettinger and takes care to minimize the number of comparisons + performed. + """ + iterable = (iterable_or_value, *others) if others else iterable_or_value + + it = iter(iterable) + + try: + lo = hi = next(it) + except StopIteration as e: + if default is _marker: + raise ValueError( + '`minmax()` argument is an empty iterable. ' + 'Provide a `default` value to suppress this error.' + ) from e + return default + + # Different branches depending on the presence of key. This saves a lot + # of unimportant copies which would slow the "key=None" branch + # significantly down. 
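+    # Items are consumed two at a time via zip_longest(it, it, fillvalue=lo):
+    # each pair is ordered internally first, then only the smaller value is
+    # compared with the running minimum and the larger with the running
+    # maximum, for roughly 3 comparisons per 2 items instead of 4. The
+    # fillvalue pads an odd-length input with a harmless extra copy of lo.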
+ if key is None: + for x, y in zip_longest(it, it, fillvalue=lo): + if y < x: + x, y = y, x + if x < lo: + lo = x + if hi < y: + hi = y + + else: + lo_key = hi_key = key(lo) + + for x, y in zip_longest(it, it, fillvalue=lo): + + x_key, y_key = key(x), key(y) + + if y_key < x_key: + x, y, x_key, y_key = y, x, y_key, x_key + if x_key < lo_key: + lo, lo_key = x, x_key + if hi_key < y_key: + hi, hi_key = y, y_key + + return lo, hi diff --git a/pkg_resources/_vendor/more_itertools/more.pyi b/pkg_resources/_vendor/more_itertools/more.pyi new file mode 100644 index 00000000..fe7d4bdd --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/more.pyi @@ -0,0 +1,664 @@ +"""Stubs for more_itertools.more""" + +from typing import ( + Any, + Callable, + Container, + Dict, + Generic, + Hashable, + Iterable, + Iterator, + List, + Optional, + Reversible, + Sequence, + Sized, + Tuple, + Union, + TypeVar, + type_check_only, +) +from types import TracebackType +from typing_extensions import ContextManager, Protocol, Type, overload + +# Type and type variable definitions +_T = TypeVar('_T') +_T1 = TypeVar('_T1') +_T2 = TypeVar('_T2') +_U = TypeVar('_U') +_V = TypeVar('_V') +_W = TypeVar('_W') +_T_co = TypeVar('_T_co', covariant=True) +_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]]) +_Raisable = Union[BaseException, 'Type[BaseException]'] + +@type_check_only +class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ... + +@type_check_only +class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ... + +def chunked( + iterable: Iterable[_T], n: Optional[int], strict: bool = ... +) -> Iterator[List[_T]]: ... +@overload +def first(iterable: Iterable[_T]) -> _T: ... +@overload +def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ... +@overload +def last(iterable: Iterable[_T]) -> _T: ... +@overload +def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ... +@overload +def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ... +@overload +def nth_or_last( + iterable: Iterable[_T], n: int, default: _U +) -> Union[_T, _U]: ... + +class peekable(Generic[_T], Iterator[_T]): + def __init__(self, iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> peekable[_T]: ... + def __bool__(self) -> bool: ... + @overload + def peek(self) -> _T: ... + @overload + def peek(self, default: _U) -> Union[_T, _U]: ... + def prepend(self, *items: _T) -> None: ... + def __next__(self) -> _T: ... + @overload + def __getitem__(self, index: int) -> _T: ... + @overload + def __getitem__(self, index: slice) -> List[_T]: ... + +def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ... +def consumer(func: _GenFn) -> _GenFn: ... +def ilen(iterable: Iterable[object]) -> int: ... +def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ... +def with_iter( + context_manager: ContextManager[Iterable[_T]], +) -> Iterator[_T]: ... +def one( + iterable: Iterable[_T], + too_short: Optional[_Raisable] = ..., + too_long: Optional[_Raisable] = ..., +) -> _T: ... +def raise_(exception: _Raisable, *args: Any) -> None: ... +def strictly_n( + iterable: Iterable[_T], + n: int, + too_short: Optional[_GenFn] = ..., + too_long: Optional[_GenFn] = ..., +) -> List[_T]: ... +def distinct_permutations( + iterable: Iterable[_T], r: Optional[int] = ... +) -> Iterator[Tuple[_T, ...]]: ... +def intersperse( + e: _U, iterable: Iterable[_T], n: int = ... +) -> Iterator[Union[_T, _U]]: ... +def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ... 
+@overload +def windowed( + seq: Iterable[_T], n: int, *, step: int = ... +) -> Iterator[Tuple[Optional[_T], ...]]: ... +@overload +def windowed( + seq: Iterable[_T], n: int, fillvalue: _U, step: int = ... +) -> Iterator[Tuple[Union[_T, _U], ...]]: ... +def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ... +def substrings_indexes( + seq: Sequence[_T], reverse: bool = ... +) -> Iterator[Tuple[Sequence[_T], int, int]]: ... + +class bucket(Generic[_T, _U], Container[_U]): + def __init__( + self, + iterable: Iterable[_T], + key: Callable[[_T], _U], + validator: Optional[Callable[[object], object]] = ..., + ) -> None: ... + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_U]: ... + def __getitem__(self, value: object) -> Iterator[_T]: ... + +def spy( + iterable: Iterable[_T], n: int = ... +) -> Tuple[List[_T], Iterator[_T]]: ... +def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ... +def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ... +def interleave_evenly( + iterables: List[Iterable[_T]], lengths: Optional[List[int]] = ... +) -> Iterator[_T]: ... +def collapse( + iterable: Iterable[Any], + base_type: Optional[type] = ..., + levels: Optional[int] = ..., +) -> Iterator[Any]: ... +@overload +def side_effect( + func: Callable[[_T], object], + iterable: Iterable[_T], + chunk_size: None = ..., + before: Optional[Callable[[], object]] = ..., + after: Optional[Callable[[], object]] = ..., +) -> Iterator[_T]: ... +@overload +def side_effect( + func: Callable[[List[_T]], object], + iterable: Iterable[_T], + chunk_size: int, + before: Optional[Callable[[], object]] = ..., + after: Optional[Callable[[], object]] = ..., +) -> Iterator[_T]: ... +def sliced( + seq: Sequence[_T], n: int, strict: bool = ... +) -> Iterator[Sequence[_T]]: ... +def split_at( + iterable: Iterable[_T], + pred: Callable[[_T], object], + maxsplit: int = ..., + keep_separator: bool = ..., +) -> Iterator[List[_T]]: ... +def split_before( + iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... +) -> Iterator[List[_T]]: ... +def split_after( + iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... +) -> Iterator[List[_T]]: ... +def split_when( + iterable: Iterable[_T], + pred: Callable[[_T, _T], object], + maxsplit: int = ..., +) -> Iterator[List[_T]]: ... +def split_into( + iterable: Iterable[_T], sizes: Iterable[Optional[int]] +) -> Iterator[List[_T]]: ... +@overload +def padded( + iterable: Iterable[_T], + *, + n: Optional[int] = ..., + next_multiple: bool = ... +) -> Iterator[Optional[_T]]: ... +@overload +def padded( + iterable: Iterable[_T], + fillvalue: _U, + n: Optional[int] = ..., + next_multiple: bool = ..., +) -> Iterator[Union[_T, _U]]: ... +@overload +def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ... +@overload +def repeat_last( + iterable: Iterable[_T], default: _U +) -> Iterator[Union[_T, _U]]: ... +def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ... +@overload +def stagger( + iterable: Iterable[_T], + offsets: _SizedIterable[int] = ..., + longest: bool = ..., +) -> Iterator[Tuple[Optional[_T], ...]]: ... +@overload +def stagger( + iterable: Iterable[_T], + offsets: _SizedIterable[int] = ..., + longest: bool = ..., + fillvalue: _U = ..., +) -> Iterator[Tuple[Union[_T, _U], ...]]: ... + +class UnequalIterablesError(ValueError): + def __init__( + self, details: Optional[Tuple[int, int, int]] = ... + ) -> None: ... 
+ +@overload +def zip_equal(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ... +@overload +def zip_equal( + __iter1: Iterable[_T1], __iter2: Iterable[_T2] +) -> Iterator[Tuple[_T1, _T2]]: ... +@overload +def zip_equal( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + *iterables: Iterable[_T] +) -> Iterator[Tuple[_T, ...]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None +) -> Iterator[Tuple[Optional[_T1]]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None +) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + *iterables: Iterable[_T], + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: None = None +) -> Iterator[Tuple[Optional[_T], ...]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: _U, +) -> Iterator[Tuple[Union[_T1, _U]]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + *, + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: _U, +) -> Iterator[Tuple[Union[_T1, _U], Union[_T2, _U]]]: ... +@overload +def zip_offset( + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + *iterables: Iterable[_T], + offsets: _SizedIterable[int], + longest: bool = ..., + fillvalue: _U, +) -> Iterator[Tuple[Union[_T, _U], ...]]: ... +def sort_together( + iterables: Iterable[Iterable[_T]], + key_list: Iterable[int] = ..., + key: Optional[Callable[..., Any]] = ..., + reverse: bool = ..., +) -> List[Tuple[_T, ...]]: ... +def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ... +def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ... +def always_iterable( + obj: object, + base_type: Union[ + type, Tuple[Union[type, Tuple[Any, ...]], ...], None + ] = ..., +) -> Iterator[Any]: ... +def adjacent( + predicate: Callable[[_T], bool], + iterable: Iterable[_T], + distance: int = ..., +) -> Iterator[Tuple[bool, _T]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None = None, + valuefunc: None = None, + reducefunc: None = None, +) -> Iterator[Tuple[_T, Iterator[_T]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None, + reducefunc: None, +) -> Iterator[Tuple[_U, Iterator[_T]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None, + valuefunc: Callable[[_T], _V], + reducefunc: None, +) -> Iterable[Tuple[_T, Iterable[_V]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: None, +) -> Iterable[Tuple[_U, Iterator[_V]]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None, + valuefunc: None, + reducefunc: Callable[[Iterator[_T]], _W], +) -> Iterable[Tuple[_T, _W]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None, + reducefunc: Callable[[Iterator[_T]], _W], +) -> Iterable[Tuple[_U, _W]]: ... 
+@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: None, + valuefunc: Callable[[_T], _V], + reducefunc: Callable[[Iterable[_V]], _W], +) -> Iterable[Tuple[_T, _W]]: ... +@overload +def groupby_transform( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: Callable[[Iterable[_V]], _W], +) -> Iterable[Tuple[_U, _W]]: ... + +class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]): + @overload + def __init__(self, __stop: _T) -> None: ... + @overload + def __init__(self, __start: _T, __stop: _T) -> None: ... + @overload + def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ... + def __bool__(self) -> bool: ... + def __contains__(self, elem: object) -> bool: ... + def __eq__(self, other: object) -> bool: ... + @overload + def __getitem__(self, key: int) -> _T: ... + @overload + def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ... + def __hash__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def __len__(self) -> int: ... + def __reduce__( + self, + ) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ... + def __repr__(self) -> str: ... + def __reversed__(self) -> Iterator[_T]: ... + def count(self, value: _T) -> int: ... + def index(self, value: _T) -> int: ... # type: ignore + +def count_cycle( + iterable: Iterable[_T], n: Optional[int] = ... +) -> Iterable[Tuple[int, _T]]: ... +def mark_ends( + iterable: Iterable[_T], +) -> Iterable[Tuple[bool, bool, _T]]: ... +def locate( + iterable: Iterable[object], + pred: Callable[..., Any] = ..., + window_size: Optional[int] = ..., +) -> Iterator[int]: ... +def lstrip( + iterable: Iterable[_T], pred: Callable[[_T], object] +) -> Iterator[_T]: ... +def rstrip( + iterable: Iterable[_T], pred: Callable[[_T], object] +) -> Iterator[_T]: ... +def strip( + iterable: Iterable[_T], pred: Callable[[_T], object] +) -> Iterator[_T]: ... + +class islice_extended(Generic[_T], Iterator[_T]): + def __init__( + self, iterable: Iterable[_T], *args: Optional[int] + ) -> None: ... + def __iter__(self) -> islice_extended[_T]: ... + def __next__(self) -> _T: ... + def __getitem__(self, index: slice) -> islice_extended[_T]: ... + +def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ... +def consecutive_groups( + iterable: Iterable[_T], ordering: Callable[[_T], int] = ... +) -> Iterator[Iterator[_T]]: ... +@overload +def difference( + iterable: Iterable[_T], + func: Callable[[_T, _T], _U] = ..., + *, + initial: None = ... +) -> Iterator[Union[_T, _U]]: ... +@overload +def difference( + iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U +) -> Iterator[_U]: ... + +class SequenceView(Generic[_T], Sequence[_T]): + def __init__(self, target: Sequence[_T]) -> None: ... + @overload + def __getitem__(self, index: int) -> _T: ... + @overload + def __getitem__(self, index: slice) -> Sequence[_T]: ... + def __len__(self) -> int: ... + +class seekable(Generic[_T], Iterator[_T]): + def __init__( + self, iterable: Iterable[_T], maxlen: Optional[int] = ... + ) -> None: ... + def __iter__(self) -> seekable[_T]: ... + def __next__(self) -> _T: ... + def __bool__(self) -> bool: ... + @overload + def peek(self) -> _T: ... + @overload + def peek(self, default: _U) -> Union[_T, _U]: ... + def elements(self) -> SequenceView[_T]: ... + def seek(self, index: int) -> None: ... + +class run_length: + @staticmethod + def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ... 
+ @staticmethod + def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ... + +def exactly_n( + iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ... +) -> bool: ... +def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ... +def make_decorator( + wrapping_func: Callable[..., _U], result_index: int = ... +) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ... +@overload +def map_reduce( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None = ..., + reducefunc: None = ..., +) -> Dict[_U, List[_T]]: ... +@overload +def map_reduce( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: None = ..., +) -> Dict[_U, List[_V]]: ... +@overload +def map_reduce( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: None = ..., + reducefunc: Callable[[List[_T]], _W] = ..., +) -> Dict[_U, _W]: ... +@overload +def map_reduce( + iterable: Iterable[_T], + keyfunc: Callable[[_T], _U], + valuefunc: Callable[[_T], _V], + reducefunc: Callable[[List[_V]], _W], +) -> Dict[_U, _W]: ... +def rlocate( + iterable: Iterable[_T], + pred: Callable[..., object] = ..., + window_size: Optional[int] = ..., +) -> Iterator[int]: ... +def replace( + iterable: Iterable[_T], + pred: Callable[..., object], + substitutes: Iterable[_U], + count: Optional[int] = ..., + window_size: int = ..., +) -> Iterator[Union[_T, _U]]: ... +def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ... +def set_partitions( + iterable: Iterable[_T], k: Optional[int] = ... +) -> Iterator[List[List[_T]]]: ... + +class time_limited(Generic[_T], Iterator[_T]): + def __init__( + self, limit_seconds: float, iterable: Iterable[_T] + ) -> None: ... + def __iter__(self) -> islice_extended[_T]: ... + def __next__(self) -> _T: ... + +@overload +def only( + iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ... +) -> Optional[_T]: ... +@overload +def only( + iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ... +) -> Union[_T, _U]: ... +def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ... +def distinct_combinations( + iterable: Iterable[_T], r: int +) -> Iterator[Tuple[_T, ...]]: ... +def filter_except( + validator: Callable[[Any], object], + iterable: Iterable[_T], + *exceptions: Type[BaseException] +) -> Iterator[_T]: ... +def map_except( + function: Callable[[Any], _U], + iterable: Iterable[_T], + *exceptions: Type[BaseException] +) -> Iterator[_U]: ... +def map_if( + iterable: Iterable[Any], + pred: Callable[[Any], bool], + func: Callable[[Any], Any], + func_else: Optional[Callable[[Any], Any]] = ..., +) -> Iterator[Any]: ... +def sample( + iterable: Iterable[_T], + k: int, + weights: Optional[Iterable[float]] = ..., +) -> List[_T]: ... +def is_sorted( + iterable: Iterable[_T], + key: Optional[Callable[[_T], _U]] = ..., + reverse: bool = False, + strict: bool = False, +) -> bool: ... + +class AbortThread(BaseException): + pass + +class callback_iter(Generic[_T], Iterator[_T]): + def __init__( + self, + func: Callable[..., Any], + callback_kwd: str = ..., + wait_seconds: float = ..., + ) -> None: ... + def __enter__(self) -> callback_iter[_T]: ... + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> Optional[bool]: ... + def __iter__(self) -> callback_iter[_T]: ... + def __next__(self) -> _T: ... + def _reader(self) -> Iterator[_T]: ... 
+ @property + def done(self) -> bool: ... + @property + def result(self) -> Any: ... + +def windowed_complete( + iterable: Iterable[_T], n: int +) -> Iterator[Tuple[_T, ...]]: ... +def all_unique( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... +) -> bool: ... +def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ... +def nth_permutation( + iterable: Iterable[_T], r: int, index: int +) -> Tuple[_T, ...]: ... +def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ... +def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ... +def combination_index( + element: Iterable[_T], iterable: Iterable[_T] +) -> int: ... +def permutation_index( + element: Iterable[_T], iterable: Iterable[_T] +) -> int: ... +def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ... + +class countable(Generic[_T], Iterator[_T]): + def __init__(self, iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> countable[_T]: ... + def __next__(self) -> _T: ... + +def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[List[_T]]: ... +def zip_broadcast( + *objects: Union[_T, Iterable[_T]], + scalar_types: Union[ + type, Tuple[Union[type, Tuple[Any, ...]], ...], None + ] = ..., + strict: bool = ... +) -> Iterable[Tuple[_T, ...]]: ... +def unique_in_window( + iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ... +) -> Iterator[_T]: ... +def duplicates_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... +) -> Iterator[_T]: ... +def duplicates_justseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... +) -> Iterator[_T]: ... + +class _SupportsLessThan(Protocol): + def __lt__(self, __other: Any) -> bool: ... + +_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan) + +@overload +def minmax( + iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None +) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ... +@overload +def minmax( + iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan] +) -> Tuple[_T, _T]: ... +@overload +def minmax( + iterable_or_value: Iterable[_SupportsLessThanT], + *, + key: None = None, + default: _U +) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ... +@overload +def minmax( + iterable_or_value: Iterable[_T], + *, + key: Callable[[_T], _SupportsLessThan], + default: _U, +) -> Union[_U, Tuple[_T, _T]]: ... +@overload +def minmax( + iterable_or_value: _SupportsLessThanT, + __other: _SupportsLessThanT, + *others: _SupportsLessThanT +) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ... +@overload +def minmax( + iterable_or_value: _T, + __other: _T, + *others: _T, + key: Callable[[_T], _SupportsLessThan] +) -> Tuple[_T, _T]: ... diff --git a/pkg_resources/_vendor/more_itertools/py.typed b/pkg_resources/_vendor/more_itertools/py.typed new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/py.typed diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py new file mode 100644 index 00000000..a2596423 --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/recipes.py @@ -0,0 +1,698 @@ +"""Imported from the recipes section of the itertools documentation. + +All functions taken from the recipes section of the itertools library docs +[1]_. +Some backward-compatible usability improvements have been made. + +.. 
[1] http://docs.python.org/library/itertools.html#recipes + +""" +import warnings +from collections import deque +from itertools import ( + chain, + combinations, + count, + cycle, + groupby, + islice, + repeat, + starmap, + tee, + zip_longest, +) +import operator +from random import randrange, sample, choice + +__all__ = [ + 'all_equal', + 'before_and_after', + 'consume', + 'convolve', + 'dotproduct', + 'first_true', + 'flatten', + 'grouper', + 'iter_except', + 'ncycles', + 'nth', + 'nth_combination', + 'padnone', + 'pad_none', + 'pairwise', + 'partition', + 'powerset', + 'prepend', + 'quantify', + 'random_combination_with_replacement', + 'random_combination', + 'random_permutation', + 'random_product', + 'repeatfunc', + 'roundrobin', + 'sliding_window', + 'tabulate', + 'tail', + 'take', + 'triplewise', + 'unique_everseen', + 'unique_justseen', +] + + +def take(n, iterable): + """Return first *n* items of the iterable as a list. + + >>> take(3, range(10)) + [0, 1, 2] + + If there are fewer than *n* items in the iterable, all of them are + returned. + + >>> take(10, range(3)) + [0, 1, 2] + + """ + return list(islice(iterable, n)) + + +def tabulate(function, start=0): + """Return an iterator over the results of ``func(start)``, + ``func(start + 1)``, ``func(start + 2)``... + + *func* should be a function that accepts one integer argument. + + If *start* is not specified it defaults to 0. It will be incremented each + time the iterator is advanced. + + >>> square = lambda x: x ** 2 + >>> iterator = tabulate(square, -3) + >>> take(4, iterator) + [9, 4, 1, 0] + + """ + return map(function, count(start)) + + +def tail(n, iterable): + """Return an iterator over the last *n* items of *iterable*. + + >>> t = tail(3, 'ABCDEFG') + >>> list(t) + ['E', 'F', 'G'] + + """ + return iter(deque(iterable, maxlen=n)) + + +def consume(iterator, n=None): + """Advance *iterable* by *n* steps. If *n* is ``None``, consume it + entirely. + + Efficiently exhausts an iterator without returning values. Defaults to + consuming the whole iterator, but an optional second argument may be + provided to limit consumption. + + >>> i = (x for x in range(10)) + >>> next(i) + 0 + >>> consume(i, 3) + >>> next(i) + 4 + >>> consume(i) + >>> next(i) + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + StopIteration + + If the iterator has fewer items remaining than the provided limit, the + whole iterator will be consumed. + + >>> i = (x for x in range(3)) + >>> consume(i, 5) + >>> next(i) + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + StopIteration + + """ + # Use functions that consume iterators at C speed. + if n is None: + # feed the entire iterator into a zero-length deque + deque(iterator, maxlen=0) + else: + # advance to the empty slice starting at position n + next(islice(iterator, n, n), None) + + +def nth(iterable, n, default=None): + """Returns the nth item or a default value. + + >>> l = range(10) + >>> nth(l, 3) + 3 + >>> nth(l, 20, "zebra") + 'zebra' + + """ + return next(islice(iterable, n, None), default) + + +def all_equal(iterable): + """ + Returns ``True`` if all the elements are equal to each other. + + >>> all_equal('aaaa') + True + >>> all_equal('aaab') + False + + """ + g = groupby(iterable) + return next(g, True) and not next(g, False) + + +def quantify(iterable, pred=bool): + """Return the how many times the predicate is true. 
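Since ``quantify`` is just ``sum(map(pred, iterable))``, a boolean predicate turns it into a counter; a minimal sketch (assuming the standalone ``more_itertools`` distribution is importable):

```python
from more_itertools import quantify

readings = [3, -1, 4, -1, 5, -9, 2]

# Count how many readings are non-negative.
print(quantify(readings, pred=lambda x: x >= 0))  # 4
```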
+ + >>> quantify([True, False, True]) + 2 + + """ + return sum(map(pred, iterable)) + + +def pad_none(iterable): + """Returns the sequence of elements and then returns ``None`` indefinitely. + + >>> take(5, pad_none(range(3))) + [0, 1, 2, None, None] + + Useful for emulating the behavior of the built-in :func:`map` function. + + See also :func:`padded`. + + """ + return chain(iterable, repeat(None)) + + +padnone = pad_none + + +def ncycles(iterable, n): + """Returns the sequence elements *n* times + + >>> list(ncycles(["a", "b"], 3)) + ['a', 'b', 'a', 'b', 'a', 'b'] + + """ + return chain.from_iterable(repeat(tuple(iterable), n)) + + +def dotproduct(vec1, vec2): + """Returns the dot product of the two iterables. + + >>> dotproduct([10, 10], [20, 20]) + 400 + + """ + return sum(map(operator.mul, vec1, vec2)) + + +def flatten(listOfLists): + """Return an iterator flattening one level of nesting in a list of lists. + + >>> list(flatten([[0, 1], [2, 3]])) + [0, 1, 2, 3] + + See also :func:`collapse`, which can flatten multiple levels of nesting. + + """ + return chain.from_iterable(listOfLists) + + +def repeatfunc(func, times=None, *args): + """Call *func* with *args* repeatedly, returning an iterable over the + results. + + If *times* is specified, the iterable will terminate after that many + repetitions: + + >>> from operator import add + >>> times = 4 + >>> args = 3, 5 + >>> list(repeatfunc(add, times, *args)) + [8, 8, 8, 8] + + If *times* is ``None`` the iterable will not terminate: + + >>> from random import randrange + >>> times = None + >>> args = 1, 11 + >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP + [2, 4, 8, 1, 8, 4] + + """ + if times is None: + return starmap(func, repeat(args)) + return starmap(func, repeat(args, times)) + + +def _pairwise(iterable): + """Returns an iterator of paired items, overlapping, from the original + + >>> take(4, pairwise(count())) + [(0, 1), (1, 2), (2, 3), (3, 4)] + + On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`. + + """ + a, b = tee(iterable) + next(b, None) + yield from zip(a, b) + + +try: + from itertools import pairwise as itertools_pairwise +except ImportError: + pairwise = _pairwise +else: + + def pairwise(iterable): + yield from itertools_pairwise(iterable) + + pairwise.__doc__ = _pairwise.__doc__ + + +def grouper(iterable, n, fillvalue=None): + """Collect data into fixed-length chunks or blocks. + + >>> list(grouper('ABCDEFG', 3, 'x')) + [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] + + """ + if isinstance(iterable, int): + warnings.warn( + "grouper expects iterable as first parameter", DeprecationWarning + ) + n, iterable = iterable, n + args = [iter(iterable)] * n + return zip_longest(fillvalue=fillvalue, *args) + + +def roundrobin(*iterables): + """Yields an item from each iterable, alternating between them. + + >>> list(roundrobin('ABC', 'D', 'EF')) + ['A', 'D', 'E', 'B', 'F', 'C'] + + This function produces the same output as :func:`interleave_longest`, but + may perform better for some inputs (in particular when the number of + iterables is small). + + """ + # Recipe credited to George Sakkis + pending = len(iterables) + nexts = cycle(iter(it).__next__ for it in iterables) + while pending: + try: + for next in nexts: + yield next() + except StopIteration: + pending -= 1 + nexts = cycle(islice(nexts, pending)) + + +def partition(pred, iterable): + """ + Returns a 2-tuple of iterables derived from the input iterable. + The first yields the items that have ``pred(item) == False``. 
+ The second yields the items that have ``pred(item) == True``. + + >>> is_odd = lambda x: x % 2 != 0 + >>> iterable = range(10) + >>> even_items, odd_items = partition(is_odd, iterable) + >>> list(even_items), list(odd_items) + ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9]) + + If *pred* is None, :func:`bool` is used. + + >>> iterable = [0, 1, False, True, '', ' '] + >>> false_items, true_items = partition(None, iterable) + >>> list(false_items), list(true_items) + ([0, False, ''], [1, True, ' ']) + + """ + if pred is None: + pred = bool + + evaluations = ((pred(x), x) for x in iterable) + t1, t2 = tee(evaluations) + return ( + (x for (cond, x) in t1 if not cond), + (x for (cond, x) in t2 if cond), + ) + + +def powerset(iterable): + """Yields all possible subsets of the iterable. + + >>> list(powerset([1, 2, 3])) + [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] + + :func:`powerset` will operate on iterables that aren't :class:`set` + instances, so repeated elements in the input will produce repeated elements + in the output. Use :func:`unique_everseen` on the input to avoid generating + duplicates: + + >>> seq = [1, 1, 0] + >>> list(powerset(seq)) + [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] + >>> from more_itertools import unique_everseen + >>> list(powerset(unique_everseen(seq))) + [(), (1,), (0,), (1, 0)] + + """ + s = list(iterable) + return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) + + +def unique_everseen(iterable, key=None): + """ + Yield unique elements, preserving order. + + >>> list(unique_everseen('AAAABBBCCDAABBB')) + ['A', 'B', 'C', 'D'] + >>> list(unique_everseen('ABBCcAD', str.lower)) + ['A', 'B', 'C', 'D'] + + Sequences with a mix of hashable and unhashable items can be used. + The function will be slower (i.e., `O(n^2)`) for unhashable items. + + Remember that ``list`` objects are unhashable - you can use the *key* + parameter to transform the list to a tuple (which is hashable) to + avoid a slowdown. + + >>> iterable = ([1, 2], [2, 3], [1, 2]) + >>> list(unique_everseen(iterable)) # Slow + [[1, 2], [2, 3]] + >>> list(unique_everseen(iterable, key=tuple)) # Faster + [[1, 2], [2, 3]] + + Similary, you may want to convert unhashable ``set`` objects with + ``key=frozenset``. For ``dict`` objects, + ``key=lambda x: frozenset(x.items())`` can be used. + + """ + seenset = set() + seenset_add = seenset.add + seenlist = [] + seenlist_add = seenlist.append + use_key = key is not None + + for element in iterable: + k = key(element) if use_key else element + try: + if k not in seenset: + seenset_add(k) + yield element + except TypeError: + if k not in seenlist: + seenlist_add(k) + yield element + + +def unique_justseen(iterable, key=None): + """Yields elements in order, ignoring serial duplicates + + >>> list(unique_justseen('AAAABBBCCDAABBB')) + ['A', 'B', 'C', 'D', 'A', 'B'] + >>> list(unique_justseen('ABBCcAD', str.lower)) + ['A', 'B', 'C', 'A', 'D'] + + """ + return map(next, map(operator.itemgetter(1), groupby(iterable, key))) + + +def iter_except(func, exception, first=None): + """Yields results from a function repeatedly until an exception is raised. + + Converts a call-until-exception interface to an iterator interface. + Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel + to end the loop. 
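In the spirit of the itertools recipes, one common use is draining a container whose "empty" state is signalled by an exception rather than by a sentinel; a minimal sketch (assuming the standalone ``more_itertools`` distribution is importable):

```python
from queue import Queue, Empty
from more_itertools import iter_except

q = Queue()
for i in range(3):
    q.put(i)

# get_nowait() raises Empty once the queue is exhausted, which ends the iteration.
print(list(iter_except(q.get_nowait, Empty)))  # [0, 1, 2]
```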
+ + >>> l = [0, 1, 2] + >>> list(iter_except(l.pop, IndexError)) + [2, 1, 0] + + Multiple exceptions can be specified as a stopping condition: + + >>> l = [1, 2, 3, '...', 4, 5, 6] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [7, 6, 5] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [4, 3, 2] + >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) + [] + + """ + try: + if first is not None: + yield first() + while 1: + yield func() + except exception: + pass + + +def first_true(iterable, default=None, pred=None): + """ + Returns the first true value in the iterable. + + If no true value is found, returns *default* + + If *pred* is not None, returns the first item for which + ``pred(item) == True`` . + + >>> first_true(range(10)) + 1 + >>> first_true(range(10), pred=lambda x: x > 5) + 6 + >>> first_true(range(10), default='missing', pred=lambda x: x > 9) + 'missing' + + """ + return next(filter(pred, iterable), default) + + +def random_product(*args, repeat=1): + """Draw an item at random from each of the input iterables. + + >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP + ('c', 3, 'Z') + + If *repeat* is provided as a keyword argument, that many items will be + drawn from each iterable. + + >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP + ('a', 2, 'd', 3) + + This equivalent to taking a random selection from + ``itertools.product(*args, **kwarg)``. + + """ + pools = [tuple(pool) for pool in args] * repeat + return tuple(choice(pool) for pool in pools) + + +def random_permutation(iterable, r=None): + """Return a random *r* length permutation of the elements in *iterable*. + + If *r* is not specified or is ``None``, then *r* defaults to the length of + *iterable*. + + >>> random_permutation(range(5)) # doctest:+SKIP + (3, 4, 0, 1, 2) + + This equivalent to taking a random selection from + ``itertools.permutations(iterable, r)``. + + """ + pool = tuple(iterable) + r = len(pool) if r is None else r + return tuple(sample(pool, r)) + + +def random_combination(iterable, r): + """Return a random *r* length subsequence of the elements in *iterable*. + + >>> random_combination(range(5), 3) # doctest:+SKIP + (2, 3, 4) + + This equivalent to taking a random selection from + ``itertools.combinations(iterable, r)``. + + """ + pool = tuple(iterable) + n = len(pool) + indices = sorted(sample(range(n), r)) + return tuple(pool[i] for i in indices) + + +def random_combination_with_replacement(iterable, r): + """Return a random *r* length subsequence of elements in *iterable*, + allowing individual elements to be repeated. + + >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP + (0, 0, 1, 2, 2) + + This equivalent to taking a random selection from + ``itertools.combinations_with_replacement(iterable, r)``. + + """ + pool = tuple(iterable) + n = len(pool) + indices = sorted(randrange(n) for i in range(r)) + return tuple(pool[i] for i in indices) + + +def nth_combination(iterable, r, index): + """Equivalent to ``list(combinations(iterable, r))[index]``. + + The subsequences of *iterable* that are of length *r* can be ordered + lexicographically. :func:`nth_combination` computes the subsequence at + sort position *index* directly, without computing the previous + subsequences. + + >>> nth_combination(range(5), 3, 5) + (0, 3, 4) + + ``ValueError`` will be raised If *r* is negative or greater than the length + of *iterable*. + ``IndexError`` will be raised if the given *index* is invalid. 
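A quick sanity check of that claim against ``itertools.combinations``, including the negative-index behaviour (a sketch; ``math.comb`` needs Python 3.8+ and the standalone ``more_itertools`` distribution is assumed):

```python
from itertools import combinations
from math import comb
from more_itertools import nth_combination

pool, r = range(5), 3
all_combos = list(combinations(pool, r))   # comb(5, 3) == 10 tuples in sorted order

assert nth_combination(pool, r, 5) == all_combos[5] == (0, 3, 4)

# Negative indices count back from the end, like sequence indexing.
assert nth_combination(pool, r, -1) == all_combos[-1] == (2, 3, 4)
assert len(all_combos) == comb(5, 3) == 10
```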
+ """ + pool = tuple(iterable) + n = len(pool) + if (r < 0) or (r > n): + raise ValueError + + c = 1 + k = min(r, n - r) + for i in range(1, k + 1): + c = c * (n - k + i) // i + + if index < 0: + index += c + + if (index < 0) or (index >= c): + raise IndexError + + result = [] + while r: + c, n, r = c * r // n, n - 1, r - 1 + while index >= c: + index -= c + c, n = c * (n - r) // n, n - 1 + result.append(pool[-1 - n]) + + return tuple(result) + + +def prepend(value, iterator): + """Yield *value*, followed by the elements in *iterator*. + + >>> value = '0' + >>> iterator = ['1', '2', '3'] + >>> list(prepend(value, iterator)) + ['0', '1', '2', '3'] + + To prepend multiple values, see :func:`itertools.chain` + or :func:`value_chain`. + + """ + return chain([value], iterator) + + +def convolve(signal, kernel): + """Convolve the iterable *signal* with the iterable *kernel*. + + >>> signal = (1, 2, 3, 4, 5) + >>> kernel = [3, 2, 1] + >>> list(convolve(signal, kernel)) + [3, 8, 14, 20, 26, 14, 5] + + Note: the input arguments are not interchangeable, as the *kernel* + is immediately consumed and stored. + + """ + kernel = tuple(kernel)[::-1] + n = len(kernel) + window = deque([0], maxlen=n) * n + for x in chain(signal, repeat(0, n - 1)): + window.append(x) + yield sum(map(operator.mul, kernel, window)) + + +def before_and_after(predicate, it): + """A variant of :func:`takewhile` that allows complete access to the + remainder of the iterator. + + >>> it = iter('ABCdEfGhI') + >>> all_upper, remainder = before_and_after(str.isupper, it) + >>> ''.join(all_upper) + 'ABC' + >>> ''.join(remainder) # takewhile() would lose the 'd' + 'dEfGhI' + + Note that the first iterator must be fully consumed before the second + iterator can generate valid results. + """ + it = iter(it) + transition = [] + + def true_iterator(): + for elem in it: + if predicate(elem): + yield elem + else: + transition.append(elem) + return + + def remainder_iterator(): + yield from transition + yield from it + + return true_iterator(), remainder_iterator() + + +def triplewise(iterable): + """Return overlapping triplets from *iterable*. + + >>> list(triplewise('ABCDE')) + [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')] + + """ + for (a, _), (b, c) in pairwise(pairwise(iterable)): + yield a, b, c + + +def sliding_window(iterable, n): + """Return a sliding window of width *n* over *iterable*. + + >>> list(sliding_window(range(6), 4)) + [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)] + + If *iterable* has fewer than *n* items, then nothing is yielded: + + >>> list(sliding_window(range(3), 4)) + [] + + For a variant with more features, see :func:`windowed`. + """ + it = iter(iterable) + window = deque(islice(it, n), maxlen=n) + if len(window) == n: + yield tuple(window) + for x in it: + window.append(x) + yield tuple(window) diff --git a/pkg_resources/_vendor/more_itertools/recipes.pyi b/pkg_resources/_vendor/more_itertools/recipes.pyi new file mode 100644 index 00000000..4648a41b --- /dev/null +++ b/pkg_resources/_vendor/more_itertools/recipes.pyi @@ -0,0 +1,112 @@ +"""Stubs for more_itertools.recipes""" +from typing import ( + Any, + Callable, + Iterable, + Iterator, + List, + Optional, + Tuple, + TypeVar, + Union, +) +from typing_extensions import overload, Type + +# Type and type variable definitions +_T = TypeVar('_T') +_U = TypeVar('_U') + +def take(n: int, iterable: Iterable[_T]) -> List[_T]: ... +def tabulate( + function: Callable[[int], _T], start: int = ... +) -> Iterator[_T]: ... 
+def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ... +def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ... +@overload +def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ... +@overload +def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ... +def all_equal(iterable: Iterable[object]) -> bool: ... +def quantify( + iterable: Iterable[_T], pred: Callable[[_T], bool] = ... +) -> int: ... +def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ... +def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ... +def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ... +def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ... +def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ... +def repeatfunc( + func: Callable[..., _U], times: Optional[int] = ..., *args: Any +) -> Iterator[_U]: ... +def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ... +@overload +def grouper( + iterable: Iterable[_T], n: int +) -> Iterator[Tuple[Optional[_T], ...]]: ... +@overload +def grouper( + iterable: Iterable[_T], n: int, fillvalue: _U +) -> Iterator[Tuple[Union[_T, _U], ...]]: ... +@overload +def grouper( # Deprecated interface + iterable: int, n: Iterable[_T] +) -> Iterator[Tuple[Optional[_T], ...]]: ... +@overload +def grouper( # Deprecated interface + iterable: int, n: Iterable[_T], fillvalue: _U +) -> Iterator[Tuple[Union[_T, _U], ...]]: ... +def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ... +def partition( + pred: Optional[Callable[[_T], object]], iterable: Iterable[_T] +) -> Tuple[Iterator[_T], Iterator[_T]]: ... +def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ... +def unique_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ... +) -> Iterator[_T]: ... +def unique_justseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ... +) -> Iterator[_T]: ... +@overload +def iter_except( + func: Callable[[], _T], + exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], + first: None = ..., +) -> Iterator[_T]: ... +@overload +def iter_except( + func: Callable[[], _T], + exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], + first: Callable[[], _U], +) -> Iterator[Union[_T, _U]]: ... +@overload +def first_true( + iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ... +) -> Optional[_T]: ... +@overload +def first_true( + iterable: Iterable[_T], + default: _U, + pred: Optional[Callable[[_T], object]] = ..., +) -> Union[_T, _U]: ... +def random_product( + *args: Iterable[_T], repeat: int = ... +) -> Tuple[_T, ...]: ... +def random_permutation( + iterable: Iterable[_T], r: Optional[int] = ... +) -> Tuple[_T, ...]: ... +def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ... +def random_combination_with_replacement( + iterable: Iterable[_T], r: int +) -> Tuple[_T, ...]: ... +def nth_combination( + iterable: Iterable[_T], r: int, index: int +) -> Tuple[_T, ...]: ... +def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ... +def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ... +def before_and_after( + predicate: Callable[[_T], bool], it: Iterable[_T] +) -> Tuple[Iterator[_T], Iterator[_T]]: ... +def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ... +def sliding_window( + iterable: Iterable[_T], n: int +) -> Iterator[Tuple[_T, ...]]: ... 
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/INSTALLER b/pkg_resources/_vendor/packaging-21.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE new file mode 100644 index 00000000..6f62d44e --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.APACHE b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.APACHE new file mode 100644 index 00000000..f433b1a5 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.BSD b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.BSD new file mode 100644 index 00000000..42ce7b75 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA b/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA new file mode 100644 index 00000000..358ace53 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/METADATA @@ -0,0 +1,453 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 21.3 +Summary: Core utilities for Python packages +Home-page: https://github.com/pypa/packaging +Author: Donald Stufft and individual contributors +Author-email: donald@stufft.io +License: BSD-2-Clause or Apache-2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Requires-Dist: pyparsing (!=3.0.5,>=2.0.2) + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications <https://packaging.python.org/specifications/>`_. 
+ +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + +Changelog +--------- + +21.3 - 2021-11-17 +~~~~~~~~~~~~~~~~~ + +* Add a ``pp3-none-any`` tag (`#311 <https://github.com/pypa/packaging/issues/311>`__) +* Replace the blank pyparsing 3 exclusion with a 3.0.5 exclusion (`#481 <https://github.com/pypa/packaging/issues/481>`__, `#486 <https://github.com/pypa/packaging/issues/486>`__) +* Fix a spelling mistake (`#479 <https://github.com/pypa/packaging/issues/479>`__) + +21.2 - 2021-10-29 +~~~~~~~~~~~~~~~~~ + +* Update documentation entry for 21.1. + +21.1 - 2021-10-29 +~~~~~~~~~~~~~~~~~ + +* Update pin to pyparsing to exclude 3.0.0. + +21.0 - 2021-07-03 +~~~~~~~~~~~~~~~~~ + +* PEP 656: musllinux support (`#411 <https://github.com/pypa/packaging/issues/411>`__) +* Drop support for Python 2.7, Python 3.4 and Python 3.5. 
+* Replace distutils usage with sysconfig (`#396 <https://github.com/pypa/packaging/issues/396>`__) +* Add support for zip files in ``parse_sdist_filename`` (`#429 <https://github.com/pypa/packaging/issues/429>`__) +* Use cached ``_hash`` attribute to short-circuit tag equality comparisons (`#417 <https://github.com/pypa/packaging/issues/417>`__) +* Specify the default value for the ``specifier`` argument to ``SpecifierSet`` (`#437 <https://github.com/pypa/packaging/issues/437>`__) +* Proper keyword-only "warn" argument in packaging.tags (`#403 <https://github.com/pypa/packaging/issues/403>`__) +* Correctly remove prerelease suffixes from ~= check (`#366 <https://github.com/pypa/packaging/issues/366>`__) +* Fix type hints for ``Version.post`` and ``Version.dev`` (`#393 <https://github.com/pypa/packaging/issues/393>`__) +* Use typing alias ``UnparsedVersion`` (`#398 <https://github.com/pypa/packaging/issues/398>`__) +* Improve type inference for ``packaging.specifiers.filter()`` (`#430 <https://github.com/pypa/packaging/issues/430>`__) +* Tighten the return type of ``canonicalize_version()`` (`#402 <https://github.com/pypa/packaging/issues/402>`__) + +20.9 - 2021-01-29 +~~~~~~~~~~~~~~~~~ + +* Run `isort <https://pypi.org/project/isort/>`_ over the code base (`#377 <https://github.com/pypa/packaging/issues/377>`__) +* Add support for the ``macosx_10_*_universal2`` platform tags (`#379 <https://github.com/pypa/packaging/issues/379>`__) +* Introduce ``packaging.utils.parse_wheel_filename()`` and ``parse_sdist_filename()`` + (`#387 <https://github.com/pypa/packaging/issues/387>`__ and `#389 <https://github.com/pypa/packaging/issues/389>`__) + +20.8 - 2020-12-11 +~~~~~~~~~~~~~~~~~ + +* Revert back to setuptools for compatibility purposes for some Linux distros (`#363 <https://github.com/pypa/packaging/issues/363>`__) +* Do not insert an underscore in wheel tags when the interpreter version number + is more than 2 digits (`#372 <https://github.com/pypa/packaging/issues/372>`__) + +20.7 - 2020-11-28 +~~~~~~~~~~~~~~~~~ + +No unreleased changes. + +20.6 - 2020-11-28 +~~~~~~~~~~~~~~~~~ + +.. note:: This release was subsequently yanked, and these changes were included in 20.7. + +* Fix flit configuration, to include LICENSE files (`#357 <https://github.com/pypa/packaging/issues/357>`__) +* Make `intel` a recognized CPU architecture for the `universal` macOS platform tag (`#361 <https://github.com/pypa/packaging/issues/361>`__) +* Add some missing type hints to `packaging.requirements` (issue:`350`) + +20.5 - 2020-11-27 +~~~~~~~~~~~~~~~~~ + +* Officially support Python 3.9 (`#343 <https://github.com/pypa/packaging/issues/343>`__) +* Deprecate the ``LegacyVersion`` and ``LegacySpecifier`` classes (`#321 <https://github.com/pypa/packaging/issues/321>`__) +* Handle ``OSError`` on non-dynamic executables when attempting to resolve + the glibc version string. + +20.4 - 2020-05-19 +~~~~~~~~~~~~~~~~~ + +* Canonicalize version before comparing specifiers. (`#282 <https://github.com/pypa/packaging/issues/282>`__) +* Change type hint for ``canonicalize_name`` to return + ``packaging.utils.NormalizedName``. + This enables the use of static typing tools (like mypy) to detect mixing of + normalized and un-normalized names. + +20.3 - 2020-03-05 +~~~~~~~~~~~~~~~~~ + +* Fix changelog for 20.2. + +20.2 - 2020-03-05 +~~~~~~~~~~~~~~~~~ + +* Fix a bug that caused a 32-bit OS that runs on a 64-bit ARM CPU (e.g. ARM-v8, + aarch64), to report the wrong bitness. 
+ +20.1 - 2020-01-24 +~~~~~~~~~~~~~~~~~~~ + +* Fix a bug caused by reuse of an exhausted iterator. (`#257 <https://github.com/pypa/packaging/issues/257>`__) + +20.0 - 2020-01-06 +~~~~~~~~~~~~~~~~~ + +* Add type hints (`#191 <https://github.com/pypa/packaging/issues/191>`__) + +* Add proper trove classifiers for PyPy support (`#198 <https://github.com/pypa/packaging/issues/198>`__) + +* Scale back depending on ``ctypes`` for manylinux support detection (`#171 <https://github.com/pypa/packaging/issues/171>`__) + +* Use ``sys.implementation.name`` where appropriate for ``packaging.tags`` (`#193 <https://github.com/pypa/packaging/issues/193>`__) + +* Expand upon the API provided by ``packaging.tags``: ``interpreter_name()``, ``mac_platforms()``, ``compatible_tags()``, ``cpython_tags()``, ``generic_tags()`` (`#187 <https://github.com/pypa/packaging/issues/187>`__) + +* Officially support Python 3.8 (`#232 <https://github.com/pypa/packaging/issues/232>`__) + +* Add ``major``, ``minor``, and ``micro`` aliases to ``packaging.version.Version`` (`#226 <https://github.com/pypa/packaging/issues/226>`__) + +* Properly mark ``packaging`` has being fully typed by adding a `py.typed` file (`#226 <https://github.com/pypa/packaging/issues/226>`__) + +19.2 - 2019-09-18 +~~~~~~~~~~~~~~~~~ + +* Remove dependency on ``attrs`` (`#178 <https://github.com/pypa/packaging/issues/178>`__, `#179 <https://github.com/pypa/packaging/issues/179>`__) + +* Use appropriate fallbacks for CPython ABI tag (`#181 <https://github.com/pypa/packaging/issues/181>`__, `#185 <https://github.com/pypa/packaging/issues/185>`__) + +* Add manylinux2014 support (`#186 <https://github.com/pypa/packaging/issues/186>`__) + +* Improve ABI detection (`#181 <https://github.com/pypa/packaging/issues/181>`__) + +* Properly handle debug wheels for Python 3.8 (`#172 <https://github.com/pypa/packaging/issues/172>`__) + +* Improve detection of debug builds on Windows (`#194 <https://github.com/pypa/packaging/issues/194>`__) + +19.1 - 2019-07-30 +~~~~~~~~~~~~~~~~~ + +* Add the ``packaging.tags`` module. (`#156 <https://github.com/pypa/packaging/issues/156>`__) + +* Correctly handle two-digit versions in ``python_version`` (`#119 <https://github.com/pypa/packaging/issues/119>`__) + + +19.0 - 2019-01-20 +~~~~~~~~~~~~~~~~~ + +* Fix string representation of PEP 508 direct URL requirements with markers. + +* Better handling of file URLs + + This allows for using ``file:///absolute/path``, which was previously + prevented due to the missing ``netloc``. + + This allows for all file URLs that ``urlunparse`` turns back into the + original URL to be valid. + + +18.0 - 2018-09-26 +~~~~~~~~~~~~~~~~~ + +* Improve error messages when invalid requirements are given. (`#129 <https://github.com/pypa/packaging/issues/129>`__) + + +17.1 - 2017-02-28 +~~~~~~~~~~~~~~~~~ + +* Fix ``utils.canonicalize_version`` when supplying non PEP 440 versions. + + +17.0 - 2017-02-28 +~~~~~~~~~~~~~~~~~ + +* Drop support for python 2.6, 3.2, and 3.3. + +* Define minimal pyparsing version to 2.0.2 (`#91 <https://github.com/pypa/packaging/issues/91>`__). + +* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to + ``Version`` and ``LegacyVersion`` (`#34 <https://github.com/pypa/packaging/issues/34>`__). + +* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to + make it easy to determine if a release is a development release. 
+ +* Add ``utils.canonicalize_version`` to canonicalize version strings or + ``Version`` instances (`#121 <https://github.com/pypa/packaging/issues/121>`__). + + +16.8 - 2016-10-29 +~~~~~~~~~~~~~~~~~ + +* Fix markers that utilize ``in`` so that they render correctly. + +* Fix an erroneous test on Python RC releases. + + +16.7 - 2016-04-23 +~~~~~~~~~~~~~~~~~ + +* Add support for the deprecated ``python_implementation`` marker which was + an undocumented setuptools marker in addition to the newer markers. + + +16.6 - 2016-03-29 +~~~~~~~~~~~~~~~~~ + +* Add support for the deprecated, PEP 345 environment markers in addition to + the newer markers. + + +16.5 - 2016-02-26 +~~~~~~~~~~~~~~~~~ + +* Fix a regression in parsing requirements with whitespaces between the comma + separators. + + +16.4 - 2016-02-22 +~~~~~~~~~~~~~~~~~ + +* Fix a regression in parsing requirements like ``foo (==4)``. + + +16.3 - 2016-02-21 +~~~~~~~~~~~~~~~~~ + +* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when + matching legacy requirements. + + +16.2 - 2016-02-09 +~~~~~~~~~~~~~~~~~ + +* Add a function that implements the name canonicalization from PEP 503. + + +16.1 - 2016-02-07 +~~~~~~~~~~~~~~~~~ + +* Implement requirement specifiers from PEP 508. + + +16.0 - 2016-01-19 +~~~~~~~~~~~~~~~~~ + +* Relicense so that packaging is available under *either* the Apache License, + Version 2.0 or a 2 Clause BSD license. + +* Support installation of packaging when only distutils is available. + +* Fix ``==`` comparison when there is a prefix and a local version in play. + (`#41 <https://github.com/pypa/packaging/issues/41>`__). + +* Implement environment markers from PEP 508. + + +15.3 - 2015-08-01 +~~~~~~~~~~~~~~~~~ + +* Normalize post-release spellings for rev/r prefixes. `#35 <https://github.com/pypa/packaging/issues/35>`__ + + +15.2 - 2015-05-13 +~~~~~~~~~~~~~~~~~ + +* Fix an error where the arbitrary specifier (``===``) was not correctly + allowing pre-releases when it was being used. + +* Expose the specifier and version parts through properties on the + ``Specifier`` classes. + +* Allow iterating over the ``SpecifierSet`` to get access to all of the + ``Specifier`` instances. + +* Allow testing if a version is contained within a specifier via the ``in`` + operator. + + +15.1 - 2015-04-13 +~~~~~~~~~~~~~~~~~ + +* Fix a logic error that was causing inconsistent answers about whether or not + a pre-release was contained within a ``SpecifierSet`` or not. + + +15.0 - 2015-01-02 +~~~~~~~~~~~~~~~~~ + +* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to + make it easy to determine if a release is a post release. + +* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make + it easy to get the public version without any pre or post release markers. + +* Support the update to PEP 440 which removed the implied ``!=V.*`` when using + either ``>V`` or ``<V`` and which instead special cased the handling of + pre-releases, post-releases, and local versions when using ``>V`` or ``<V``. + + +14.5 - 2014-12-17 +~~~~~~~~~~~~~~~~~ + +* Normalize release candidates as ``rc`` instead of ``c``. + +* Expose the ``VERSION_PATTERN`` constant, a regular expression matching + a valid version. + + +14.4 - 2014-12-15 +~~~~~~~~~~~~~~~~~ + +* Ensure that versions are normalized before comparison when used in a + specifier with a less than (``<``) or greater than (``>``) operator. 
+ + +14.3 - 2014-11-19 +~~~~~~~~~~~~~~~~~ + +* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely + handle legacy specifiers as well as PEP 440 specifiers. + +* **BACKWARDS INCOMPATIBLE** Move the specifier support out of + ``packaging.version`` into ``packaging.specifiers``. + + +14.2 - 2014-09-10 +~~~~~~~~~~~~~~~~~ + +* Add prerelease support to ``Specifier``. +* Remove the ability to do ``item in Specifier()`` and replace it with + ``Specifier().contains(item)`` in order to allow flags that signal if a + prerelease should be accepted or not. +* Add a method ``Specifier().filter()`` which will take an iterable and returns + an iterable with items that do not match the specifier filtered out. + + +14.1 - 2014-09-08 +~~~~~~~~~~~~~~~~~ + +* Allow ``LegacyVersion`` and ``Version`` to be sorted together. +* Add ``packaging.version.parse()`` to enable easily parsing a version string + as either a ``Version`` or a ``LegacyVersion`` depending on it's PEP 440 + validity. + + +14.0 - 2014-09-05 +~~~~~~~~~~~~~~~~~ + +* Initial release. + + +.. _`master`: https://github.com/pypa/packaging/ + + diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD b/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD new file mode 100644 index 00000000..97cace10 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/RECORD @@ -0,0 +1,32 @@ +packaging-21.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-21.3.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-21.3.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-21.3.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-21.3.dist-info/METADATA,sha256=KuKIy6qDLP3svIt6ejCbxBDhvq11ebkgUN55MeyKFyc,15147
+packaging-21.3.dist-info/RECORD,,
+packaging-21.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-21.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+packaging-21.3.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10
+packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661
+packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
+packaging/__pycache__/__about__.cpython-310.pyc,,
+packaging/__pycache__/__init__.cpython-310.pyc,,
+packaging/__pycache__/_manylinux.cpython-310.pyc,,
+packaging/__pycache__/_musllinux.cpython-310.pyc,,
+packaging/__pycache__/_structures.cpython-310.pyc,,
+packaging/__pycache__/markers.cpython-310.pyc,,
+packaging/__pycache__/requirements.cpython-310.pyc,,
+packaging/__pycache__/specifiers.cpython-310.pyc,,
+packaging/__pycache__/tags.cpython-310.pyc,,
+packaging/__pycache__/utils.cpython-310.pyc,,
+packaging/__pycache__/version.cpython-310.pyc,,
+packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
+packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/markers.py,sha256=Fygi3_eZnjQ-3VJizW5AhI5wvo0Hb6RMk4DidsKpOC0,8475
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=rjaGRCMepZS1mlYMjJ5Qh6rfq3gtsCRQUQmftGZ_bu8,4664
+packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110
+packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699
+packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
+packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
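The vendored ``packaging`` 21.3 supplies the version, specifier, and name-canonicalization utilities described in the METADATA above. A minimal sketch of that core API, assuming the upstream import paths (inside setuptools and pkg_resources the package is reached through the ``extern`` bridge instead)::

    >>> from packaging.version import Version
    >>> from packaging.specifiers import SpecifierSet
    >>> from packaging.utils import canonicalize_name
    >>> Version("21.3") in SpecifierSet(">=21.0,<22")   # PEP 440 containment check
    True
    >>> Version("21.3").release
    (21, 3)
    >>> canonicalize_name("Foo.Bar")                    # PEP 503 normalization
    'foo-bar'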
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/REQUESTED b/pkg_resources/_vendor/packaging-21.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL b/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt b/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt new file mode 100644 index 00000000..748809f7 --- /dev/null +++ b/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt @@ -0,0 +1 @@ +packaging diff --git a/pkg_resources/_vendor/packaging/__about__.py b/pkg_resources/_vendor/packaging/__about__.py index c359122f..3551bc2d 100644 --- a/pkg_resources/_vendor/packaging/__about__.py +++ b/pkg_resources/_vendor/packaging/__about__.py @@ -17,7 +17,7 @@ __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "21.2" +__version__ = "21.3" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py index 85450faf..8ac3059b 100644 --- a/pkg_resources/_vendor/packaging/_musllinux.py +++ b/pkg_resources/_vendor/packaging/_musllinux.py @@ -98,7 +98,7 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]: with contextlib.ExitStack() as stack: try: f = stack.enter_context(open(executable, "rb")) - except IOError: + except OSError: return None ld = _parse_ld_musl_from_elf(f) if not ld: diff --git a/pkg_resources/_vendor/packaging/_structures.py b/pkg_resources/_vendor/packaging/_structures.py index 95154975..90a6465f 100644 --- a/pkg_resources/_vendor/packaging/_structures.py +++ b/pkg_resources/_vendor/packaging/_structures.py @@ -19,9 +19,6 @@ class InfinityType: def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other: object) -> bool: - return not isinstance(other, self.__class__) - def __gt__(self, other: object) -> bool: return True @@ -51,9 +48,6 @@ class NegativeInfinityType: def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other: object) -> bool: - return not isinstance(other, self.__class__) - def __gt__(self, other: object) -> bool: return False diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py index ce66bd4a..0e218a6f 100644 --- a/pkg_resources/_vendor/packaging/specifiers.py +++ b/pkg_resources/_vendor/packaging/specifiers.py @@ -57,13 +57,6 @@ class BaseSpecifier(metaclass=abc.ABCMeta): objects are equal. """ - @abc.abstractmethod - def __ne__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. 
- """ - @abc.abstractproperty def prereleases(self) -> Optional[bool]: """ @@ -119,7 +112,7 @@ class _IndividualSpecifier(BaseSpecifier): else "" ) - return "<{}({!r}{})>".format(self.__class__.__name__, str(self), pre) + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" def __str__(self) -> str: return "{}{}".format(*self._spec) @@ -142,17 +135,6 @@ class _IndividualSpecifier(BaseSpecifier): return self._canonical_spec == other._canonical_spec - def __ne__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - def _get_operator(self, op: str) -> CallableOperator: operator_callable: CallableOperator = getattr( self, f"_compare_{self._operators[op]}" @@ -667,7 +649,7 @@ class SpecifierSet(BaseSpecifier): else "" ) - return "<SpecifierSet({!r}{})>".format(str(self), pre) + return f"<SpecifierSet({str(self)!r}{pre})>" def __str__(self) -> str: return ",".join(sorted(str(s) for s in self._specs)) @@ -706,14 +688,6 @@ class SpecifierSet(BaseSpecifier): return self._specs == other._specs - def __ne__(self, other: object) -> bool: - if isinstance(other, (str, _IndividualSpecifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - def __len__(self) -> int: return len(self._specs) diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py index e65890a9..9a3d25a7 100644 --- a/pkg_resources/_vendor/packaging/tags.py +++ b/pkg_resources/_vendor/packaging/tags.py @@ -90,7 +90,7 @@ class Tag: return f"{self._interpreter}-{self._abi}-{self._platform}" def __repr__(self) -> str: - return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + return f"<{self} @ {id(self)}>" def parse_tag(tag: str) -> FrozenSet[Tag]: @@ -192,7 +192,7 @@ def cpython_tags( if not python_version: python_version = sys.version_info[:2] - interpreter = "cp{}".format(_version_nodot(python_version[:2])) + interpreter = f"cp{_version_nodot(python_version[:2])}" if abis is None: if len(python_version) > 1: @@ -268,11 +268,11 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: all previous versions of that major version. 
""" if len(py_version) > 1: - yield "py{version}".format(version=_version_nodot(py_version[:2])) - yield "py{major}".format(major=py_version[0]) + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" if len(py_version) > 1: for minor in range(py_version[1] - 1, -1, -1): - yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + yield f"py{_version_nodot((py_version[0], minor))}" def compatible_tags( @@ -481,4 +481,7 @@ def sys_tags(*, warn: bool = False) -> Iterator[Tag]: else: yield from generic_tags() - yield from compatible_tags() + if interp_name == "pp": + yield from compatible_tags(interpreter="pp3") + else: + yield from compatible_tags() diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..e1187231 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +UNKNOWN + + diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/INSTALLER b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt new file mode 100644 index 00000000..bbc959e0 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt @@ -0,0 +1,18 @@ +Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA new file mode 100644 index 00000000..a15c350e --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/METADATA @@ -0,0 +1,30 @@ +Metadata-Version: 2.0 +Name: pyparsing +Version: 2.2.1 +Summary: Python parsing module +Home-page: https://github.com/pyparsing/pyparsing/ +Author: Paul McGuire +Author-email: ptmcg@users.sourceforge.net +License: MIT License +Download-URL: https://pypi.org/project/pyparsing/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* + +UNKNOWN + + diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD new file mode 100644 index 00000000..09cc30e3 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/RECORD @@ -0,0 +1,11 @@ +__pycache__/pyparsing.cpython-310.pyc,,
+pyparsing-2.2.1.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10
+pyparsing-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pyparsing-2.2.1.dist-info/LICENSE.txt,sha256=081Pq74Spe1XdwrGkewNKSqa078kLIh7UWI-wVjdj8I,1041
+pyparsing-2.2.1.dist-info/METADATA,sha256=I0jhx9vpUYlQXjn4gVDnFFoAt3nNrxwR4iuqA_pknYs,1091
+pyparsing-2.2.1.dist-info/RECORD,,
+pyparsing-2.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyparsing-2.2.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
+pyparsing-2.2.1.dist-info/metadata.json,sha256=v1_77-dSdajUZSItSJg8Ov9M713STY3PzhyrRvs1ax4,1185
+pyparsing-2.2.1.dist-info/top_level.txt,sha256=eUOjGzJVhlQ3WS2rFAy2mN3LX_7FKTM5GSJ04jfnLmU,10
+pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/REQUESTED b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL new file mode 100644 index 00000000..7332a419 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json new file mode 100644 index 00000000..b760b766 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "download_url": "https://pypi.org/project/pyparsing/", "extensions": {"python.details": {"contacts": [{"email": "ptmcg@users.sourceforge.net", "name": "Paul McGuire", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pyparsing/pyparsing/"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "pyparsing", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", "summary": "Python parsing module", "version": "2.2.1"}
\ No newline at end of file diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt new file mode 100644 index 00000000..210dfec5 --- /dev/null +++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +pyparsing diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt index 444ed25b..d5dbe736 100644 --- a/pkg_resources/_vendor/vendored.txt +++ b/pkg_resources/_vendor/vendored.txt @@ -1,3 +1,8 @@ -packaging==21.2 +packaging==21.3 pyparsing==2.2.1 appdirs==1.4.3 +jaraco.text==3.7.0 +# required for jaraco.text on older Pythons +importlib_resources==5.4.0 +# required for importlib_resources on older Pythons +zipp==3.7.0 diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/INSTALLER b/pkg_resources/_vendor/zipp-3.7.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE b/pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA b/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA new file mode 100644 index 00000000..b1308b5f --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.1 +Name: zipp +Version: 3.7.0 +Summary: Backport of pathlib-compatible object wrapper for zip files +Home-page: https://github.com/jaraco/zipp +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.itertools ; extra == 'testing' +Requires-Dist: func-timeout ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/zipp.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/zipp.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/zipp + +.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg + :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + +A pathlib-compatible Zipfile object wrapper. Official backport of the standard library +`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_. + + diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD new file mode 100644 index 00000000..38d0b21a --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/zipp.cpython-310.pyc,,
+zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
+zipp-3.7.0.dist-info/RECORD,,
+zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED b/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL b/pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt new file mode 100644 index 00000000..e82f676f --- /dev/null +++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zipp diff --git a/pkg_resources/_vendor/zipp.py b/pkg_resources/_vendor/zipp.py new file mode 100644 index 00000000..26b723c1 --- /dev/null +++ b/pkg_resources/_vendor/zipp.py @@ -0,0 +1,329 @@ +import io +import posixpath +import zipfile +import itertools +import contextlib +import sys +import pathlib + +if sys.version_info < (3, 7): + from collections import OrderedDict +else: + OrderedDict = dict + + +__all__ = ['Path'] + + +def _parents(path): + """ + Given a path with elements separated by + posixpath.sep, generate all parents of that path. + + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = OrderedDict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class CompleteDirs(zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. 
+ """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(_pathlib_compat(source)) + + # Only allow for FastLookup when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. + """ + + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super(FastLookup, self).namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super(FastLookup, self)._name_set() + return self.__lookup + + +def _pathlib_compat(path): + """ + For path-like objects, convert to a filename for compatibility + on Python 3.6.1 and earlier. + """ + try: + return path.__fspath__() + except AttributeError: + return str(path) + + +class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = zipfile.ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. + + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text() + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ + + __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=""): + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) + self.at = at + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). 
+ """ + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists() and zip_mode == 'r': + raise FileNotFoundError(self) + stream = self.root.open(self.at, zip_mode, pwd=pwd) + if 'b' in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + return io.TextIOWrapper(stream, *args, **kwargs) + + @property + def name(self): + return pathlib.Path(self.at).name or self.filename.name + + @property + def suffix(self): + return pathlib.Path(self.at).suffix or self.filename.suffix + + @property + def suffixes(self): + return pathlib.Path(self.at).suffixes or self.filename.suffixes + + @property + def stem(self): + return pathlib.Path(self.at).stem or self.filename.stem + + @property + def filename(self): + return pathlib.Path(self.root.filename).joinpath(self.at) + + def read_text(self, *args, **kwargs): + with self.open('r', *args, **kwargs) as strm: + return strm.read() + + def read_bytes(self): + with self.open('rb') as strm: + return strm.read() + + def _is_child(self, path): + return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") + + def _next(self, at): + return self.__class__(self.root, at) + + def is_dir(self): + return not self.at or self.at.endswith("/") + + def is_file(self): + return self.exists() and not self.is_dir() + + def exists(self): + return self.at in self.root._name_set() + + def iterdir(self): + if not self.is_dir(): + raise ValueError("Can't listdir a file") + subs = map(self._next, self.root.namelist()) + return filter(self._is_child, subs) + + def __str__(self): + return posixpath.join(self.root.filename, self.at) + + def __repr__(self): + return self.__repr.format(self=self) + + def joinpath(self, *other): + next = posixpath.join(self.at, *map(_pathlib_compat, other)) + return self._next(self.root.resolve_dir(next)) + + __truediv__ = joinpath + + @property + def parent(self): + if not self.at: + return self.filename.parent + parent_at = posixpath.dirname(self.at.rstrip('/')) + if parent_at: + parent_at += '/' + return self._next(parent_at) diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index fed59295..70897eea 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -69,5 +69,8 @@ class VendorImporter: sys.meta_path.append(self) -names = 'packaging', 'pyparsing', 'appdirs' +names = ( + 'packaging', 'pyparsing', 'appdirs', 'jaraco', 'importlib_resources', + 'more_itertools', +) VendorImporter(__name__, names).install() diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py index db13c714..575656ee 100644 --- a/pkg_resources/tests/test_working_set.py +++ b/pkg_resources/tests/test_working_set.py @@ -42,7 +42,7 @@ def parse_distributions(s): continue fields = spec.split('\n', 1) assert 1 <= len(fields) <= 2 - name, version = fields.pop(0).split('-') + name, version = fields.pop(0).rsplit('-', 1) if fields: requires = textwrap.dedent(fields.pop(0)) metadata = Metadata(('requires.txt', requires)) @@ -465,6 +465,25 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] VersionConflict ''', + + ''' + # id + wanted_normalized_name_installed_canonical + + # installed + foo.bar-3.6 + + # installable + + # wanted + foo-bar==3.6 + + # resolved + foo.bar-3.6 + + # resolved [replace conflicting] + foo.bar-3.6 + ''', ) def test_working_set_resolve(installed_dists, installable_dists, requirements, replace_conflicting, resolved_dists_or_exception): 
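In the ``parse_distributions`` test helper changed above, each spec encodes a distribution as ``<name>-<version>``. Splitting from the right keeps a project name intact even when it contains additional dashes, and the new ``foo.bar-3.6`` case then exercises requirement-name canonicalization (``foo-bar==3.6`` matching the installed ``foo.bar``). A plain-Python illustration of why ``rsplit`` is used (the dashed name below is invented for illustration)::

    >>> "foo.bar-3.6".rsplit('-', 1)
    ['foo.bar', '3.6']
    >>> "foo-bar-3.6".split('-')      # old code: three pieces, unpacking would fail
    ['foo', 'bar', '3.6']
    >>> "foo-bar-3.6".rsplit('-', 1)  # new code: name keeps its internal dash
    ['foo-bar', '3.6']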
diff --git a/pyproject.toml b/pyproject.toml index 03c40125..6b426a37 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ addopts = "-n auto" directory = "changelog.d" title_format = "v{version}" issue_format = "#{issue}" - template = "towncrier_template.rst" + template = "tools/towncrier_template.rst" underlines = ["-", "^"] [[tool.towncrier.type]] @@ -12,8 +12,19 @@ filterwarnings= ## upstream # Suppress deprecation warning in flake8 ignore:SelectableGroups dict interface is deprecated::flake8 - # Suppress deprecation warning in pypa/packaging#433 - ignore:The distutils package is deprecated::packaging.tags + + # shopkeep/pytest-black#55 + ignore:<class 'pytest_black.BlackItem'> is not using a cooperative constructor:pytest.PytestDeprecationWarning + ignore:The \(fspath. py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning + ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning + + # tholo/pytest-flake8#83 + ignore:<class 'pytest_flake8.Flake8Item'> is not using a cooperative constructor:pytest.PytestDeprecationWarning + ignore:The \(fspath. py.path.local\) argument to Flake8Item is deprecated.:pytest.PytestDeprecationWarning + ignore:Flake8Item is an Item subclass and should not be a collector:pytest.PytestWarning + + # dbader/pytest-mypy#131 + ignore:The \(fspath. py.path.local\) argument to MypyFile is deprecated.:pytest.PytestDeprecationWarning ## end upstream # https://github.com/pypa/setuptools/issues/1823 @@ -40,9 +51,12 @@ filterwarnings= # SETUPTOOLS_USE_DISTUTILS=stdlib but for # https://github.com/pytest-dev/pytest/discussions/9296 ignore:The distutils.sysconfig module is deprecated, use sysconfig instead + ignore:The distutils package is deprecated.* # Workaround for pypa/setuptools#2868 # ideally would apply to PyPy only but for # https://github.com/pytest-dev/pytest/discussions/9296 ignore:Distutils was imported before setuptools ignore:Setuptools is replacing distutils + + ignore:Support for project metadata in .pyproject.toml. is still experimental @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 59.2.0 +version = 62.1.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages @@ -24,7 +24,7 @@ project_urls = packages = find_namespace: # disabled as it causes tests to be included #2505 # include_package_data = true -python_requires = >=3.6 +python_requires = >=3.7 install_requires = [options.packages.find] @@ -34,6 +34,7 @@ exclude = docs* tests* *.tests + *.tests.* tools* [options.extras_require] @@ -45,34 +46,52 @@ testing = pytest-black >= 0.3.7; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" - pytest-cov - pytest-mypy; \ + pytest-cov; \ + # coverage seems to make PyPy extremely slow + python_implementation != "PyPy" + pytest-mypy >= 0.9.1; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 1.0.1 + pytest-perf # local mock flake8-2020 virtualenv>=13.0.0 - pytest-virtualenv>=1.2.7 # TODO: Update once man-group/pytest-plugins#188 is solved wheel - paver pip>=19.1 # For proper file:// URLs support. 
jaraco.envs>=2.2 pytest-xdist - sphinx jaraco.path>=3.2.0 + build[virtualenv] + filelock>=3.4.0 + pip_run>=8.8 + ini2toml[lite]>=0.9 + tomli-w>=1.0.0 + +testing-integration = + pytest + pytest-xdist + pytest-enabler + virtualenv>=13.0.0 + tomli + wheel + jaraco.path>=3.2.0 + jaraco.envs>=2.2 + build[virtualenv] + filelock>=3.4.0 docs = # upstream sphinx - jaraco.packaging >= 8.2 + jaraco.packaging >= 9 rst.linker >= 1.9 jaraco.tidelift >= 1.4 # local pygments-github-lexers==0.0.5 + sphinx-favicon sphinx-inline-tabs sphinxcontrib-towncrier furo @@ -54,7 +54,7 @@ class install_with_pth(install): _pth_contents = textwrap.dedent(""" import os var = 'SETUPTOOLS_USE_DISTUTILS' - enabled = os.environ.get(var, 'stdlib') == 'local' + enabled = os.environ.get(var, 'local') == 'local' enabled and __import__('_distutils_hack').add_shim() """).lstrip().replace('\n', '; ') diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 9d6f0bc0..cff04323 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -1,15 +1,15 @@ """Extensions to the 'distutils' for large or complex distributions""" -from fnmatch import fnmatchcase import functools import os import re +import warnings import _distutils_hack.override # noqa: F401 import distutils.core from distutils.errors import DistutilsOptionError -from distutils.util import convert_path +from distutils.util import convert_path as _convert_path from ._deprecation_warning import SetuptoolsDeprecationWarning @@ -17,7 +17,9 @@ import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution from setuptools.depends import Require +from setuptools.discovery import PackageFinder, PEP420PackageFinder from . import monkey +from . import logging __all__ = [ @@ -36,85 +38,6 @@ __version__ = setuptools.version.__version__ bootstrap_install_from = None -class PackageFinder: - """ - Generate a list of all Python packages found within a directory - """ - - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' is the root directory which will be searched for packages. It - should be supplied as a "cross-platform" (i.e. URL-style) path; it will - be converted to the appropriate local path syntax. - - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - """ - - return list( - cls._find_packages_iter( - convert_path(where), - cls._build_filter('ez_setup', '*__pycache__', *exclude), - cls._build_filter(*include), - ) - ) - - @classmethod - def _find_packages_iter(cls, where, exclude, include): - """ - All the packages found in 'where' that pass the 'include' filter, but - not the 'exclude' filter. - """ - for root, dirs, files in os.walk(where, followlinks=True): - # Copy dirs to iterate over it, then empty dirs. - all_dirs = dirs[:] - dirs[:] = [] - - for dir in all_dirs: - full_path = os.path.join(root, dir) - rel_path = os.path.relpath(full_path, where) - package = rel_path.replace(os.path.sep, '.') - - # Skip directory trees that are not valid packages - if '.' 
in dir or not cls._looks_like_package(full_path): - continue - - # Should this package be included? - if include(package) and not exclude(package): - yield package - - # Keep searching subdirectories, as there may be more packages - # down there, even if the parent was excluded. - dirs.append(dir) - - @staticmethod - def _looks_like_package(path): - """Does a directory look like a package?""" - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches at least one of the patterns. - """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - - find_packages = PackageFinder.find find_namespace_packages = PEP420PackageFinder.find @@ -131,7 +54,17 @@ def _install_setup_requires(attrs): def __init__(self, attrs): _incl = 'dependency_links', 'setup_requires' filtered = {k: attrs[k] for k in set(_incl) & set(attrs)} - distutils.core.Distribution.__init__(self, filtered) + super().__init__(filtered) + # Prevent accidentally triggering discovery with incomplete set of attrs + self.set_defaults._disable() + + def _get_project_config_files(self, filenames=None): + """Ignore ``pyproject.toml``, they are not related to setup_requires""" + try: + cfg, toml = super()._split_standard_project_metadata(filenames) + return cfg, () + except Exception: + return filenames, () def finalize_options(self): """ @@ -149,6 +82,7 @@ def _install_setup_requires(attrs): def setup(**attrs): # Make sure we have any requirements needed to interpret 'attrs'. + logging.configure() _install_setup_requires(attrs) return distutils.core.setup(**attrs) @@ -169,7 +103,7 @@ class Command(_Command): Construct the command for dist, updating vars(self) with any keyword parameters. """ - _Command.__init__(self, dist) + super().__init__(dist) vars(self).update(kw) def _ensure_stringlike(self, option, what, default=None): @@ -234,6 +168,19 @@ def findall(dir=os.curdir): return list(files) +@functools.wraps(_convert_path) +def convert_path(pathname): + from inspect import cleandoc + + msg = """ + The function `convert_path` is considered internal and not part of the public API. + Its direct usage by 3rd-party packages is considered deprecated and the function + may be removed in the future. + """ + warnings.warn(cleandoc(msg), SetuptoolsDeprecationWarning) + return _convert_path(pathname) + + class sic(str): """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" diff --git a/setuptools/_distutils/_collections.py b/setuptools/_distutils/_collections.py new file mode 100644 index 00000000..98fce800 --- /dev/null +++ b/setuptools/_distutils/_collections.py @@ -0,0 +1,56 @@ +import collections +import itertools + + +# from jaraco.collections 3.5.1 +class DictStack(list, collections.abc.Mapping): + """ + A stack of dictionaries that behaves as a view on those dictionaries, + giving preference to the last. 
+ + >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) + >>> stack['a'] + 2 + >>> stack['b'] + 2 + >>> stack['c'] + 2 + >>> len(stack) + 3 + >>> stack.push(dict(a=3)) + >>> stack['a'] + 3 + >>> set(stack.keys()) == set(['a', 'b', 'c']) + True + >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) + True + >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) + True + >>> d = stack.pop() + >>> stack['a'] + 2 + >>> d = stack.pop() + >>> stack['a'] + 1 + >>> stack.get('b', None) + >>> 'c' in stack + True + """ + + def __iter__(self): + dicts = list.__iter__(self) + return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) + + def __getitem__(self, key): + for scope in reversed(tuple(list.__iter__(self))): + if key in scope: + return scope[key] + raise KeyError(key) + + push = list.append + + def __contains__(self, other): + return collections.abc.Mapping.__contains__(self, other) + + def __len__(self): + return len(list(iter(self))) diff --git a/setuptools/_distutils/_macos_compat.py b/setuptools/_distutils/_macos_compat.py new file mode 100644 index 00000000..17769e91 --- /dev/null +++ b/setuptools/_distutils/_macos_compat.py @@ -0,0 +1,12 @@ +import sys +import importlib + + +def bypass_compiler_fixup(cmd, args): + return cmd + + +if sys.platform == 'darwin': + compiler_fixup = importlib.import_module('_osx_support').compiler_fixup +else: + compiler_fixup = bypass_compiler_fixup diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py index b7a06082..f2f801c5 100644 --- a/setuptools/_distutils/_msvccompiler.py +++ b/setuptools/_distutils/_msvccompiler.py @@ -203,7 +203,7 @@ class MSVCCompiler(CCompiler) : def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + super().__init__(verbose, dry_run, force) # target platform (.plat_name is consistent with 'bdist') self.plat_name = None self.initialized = False @@ -527,7 +527,7 @@ class MSVCCompiler(CCompiler) : return warnings.warn( "Fallback spawn triggered. Please update distutils monkeypatch.") - with unittest.mock.patch('os.environ', env): + with unittest.mock.patch.dict('os.environ', env): bag.value = super().spawn(cmd) # -- Miscellaneous methods ----------------------------------------- diff --git a/setuptools/_distutils/bcppcompiler.py b/setuptools/_distutils/bcppcompiler.py index 071fea5d..2eb6d2e9 100644 --- a/setuptools/_distutils/bcppcompiler.py +++ b/setuptools/_distutils/bcppcompiler.py @@ -55,7 +55,7 @@ class BCPPCompiler(CCompiler) : dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + super().__init__(verbose, dry_run, force) # These executables are assumed to all be in the path. 
# Borland doesn't seem to use any special registry settings to diff --git a/setuptools/_distutils/command/bdist_msi.py b/setuptools/_distutils/command/bdist_msi.py index 0863a188..15259532 100644 --- a/setuptools/_distutils/command/bdist_msi.py +++ b/setuptools/_distutils/command/bdist_msi.py @@ -27,7 +27,7 @@ class PyDialog(Dialog): def __init__(self, *args, **kw): """Dialog(database, name, x, y, w, h, attributes, title, first, default, cancel, bitmap=true)""" - Dialog.__init__(self, *args) + super().__init__(*args) ruler = self.h - 36 bmwidth = 152*ruler/328 #if kw.get("bitmap", True): diff --git a/setuptools/_distutils/command/build.py b/setuptools/_distutils/command/build.py index 4355a632..9606b81a 100644 --- a/setuptools/_distutils/command/build.py +++ b/setuptools/_distutils/command/build.py @@ -81,7 +81,8 @@ class build(Command): "--plat-name only supported on Windows (try " "using './configure --help' on your platform)") - plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2]) + plat_specifier = ".%s-%s" % (self.plat_name, + sys.implementation.cache_tag) # Make it so Python 2.x and Python 2.x with --with-pydebug don't # share the same build directories. Doing so confuses the build diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py index 22628baf..181671bf 100644 --- a/setuptools/_distutils/command/build_ext.py +++ b/setuptools/_distutils/command/build_ext.py @@ -202,9 +202,7 @@ class build_ext(Command): # Append the source distribution include and library directories, # this allows distutils on windows to work in the source tree self.include_dirs.append(os.path.dirname(get_config_h_filename())) - _sys_home = getattr(sys, '_home', None) - if _sys_home: - self.library_dirs.append(_sys_home) + self.library_dirs.append(sys.base_exec_prefix) # Use the .lib files for the correct architecture if self.plat_name == 'win32': diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py index e3312cf0..e56511da 100644 --- a/setuptools/_distutils/command/build_scripts.py +++ b/setuptools/_distutils/command/build_scripts.py @@ -2,7 +2,8 @@ Implements the Distutils 'build_scripts' command.""" -import os, re +import os +import re from stat import ST_MODE from distutils import sysconfig from distutils.core import Command @@ -11,8 +12,14 @@ from distutils.util import convert_path from distutils import log import tokenize -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') +shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$') +""" +Pattern matching a Python interpreter indicated in first line of a script. +""" + +# for Setuptools compatibility +first_line_re = shebang_pattern + class build_scripts(Command): @@ -26,13 +33,11 @@ class build_scripts(Command): boolean_options = ['force'] - def initialize_options(self): self.build_dir = None self.scripts = None self.force = None self.executable = None - self.outfiles = None def finalize_options(self): self.set_undefined_options('build', @@ -49,104 +54,117 @@ class build_scripts(Command): return self.copy_scripts() - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. + """ + Copy each script listed in ``self.scripts``. 
+ + If a script is marked as a Python script (first line matches + 'shebang_pattern', i.e. starts with ``#!`` and contains + "python"), then adjust in the copy the first line to refer to + the current Python interpreter. """ self.mkpath(self.build_dir) outfiles = [] updated_files = [] for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? + self._copy_script(script, outfiles, updated_files) + + self._change_modes(outfiles) + return outfiles, updated_files + + def _copy_script(self, script, outfiles, updated_files): + shebang_match = None + script = convert_path(script) + outfile = os.path.join(self.build_dir, os.path.basename(script)) + outfiles.append(outfile) + + if not self.force and not newer(script, outfile): + log.debug("not copying %s (up-to-date)", script) + return + + # Always open the file, but ignore failures in dry-run mode + # in order to attempt to copy directly. 
+ try: + f = tokenize.open(script) + except OSError: + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: + self.warn("%s is an empty file (skipping)" % script) + return + + shebang_match = shebang_pattern.match(first_line) + + updated_files.append(outfile) + if shebang_match: + log.info("copying and adjusting %s -> %s", script, + self.build_dir) + if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python%s%s" % ( + sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"))) + post_interp = shebang_match.group(1) or '' + shebang = "#!" + executable + post_interp + "\n" + self._validate_shebang(shebang, f.encoding) + with open(outfile, "w", encoding=f.encoding) as outf: + outf.write(shebang) + outf.writelines(f.readlines()) + if f: + f.close() + else: + if f: + f.close() + self.copy_file(script, outfile) + + def _change_modes(self, outfiles): + if os.name != 'posix': + return + + for file in outfiles: + self._change_mode(file) + + def _change_mode(self, file): + if self.dry_run: + log.info("changing mode of %s", file) + return + + oldmode = os.stat(file)[ST_MODE] & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 + if newmode != oldmode: + log.info("changing mode of %s from %o to %o", + file, oldmode, newmode) + os.chmod(file, newmode) + + @staticmethod + def _validate_shebang(shebang, encoding): + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be encodable to + # UTF-8. + try: + shebang.encode('utf-8') + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " + "to utf-8".format(shebang)) + + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be encodable to + # the script encoding too. + try: + shebang.encode(encoding) + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " + "to the script encoding ({})" + .format(shebang, encoding)) diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py index ada25006..af311ca9 100644 --- a/setuptools/_distutils/command/check.py +++ b/setuptools/_distutils/command/check.py @@ -2,6 +2,8 @@ Implements the Distutils 'check' command. 
""" +from email.utils import getaddresses + from distutils.core import Command from distutils.errors import DistutilsSetupError @@ -17,7 +19,7 @@ try: def __init__(self, source, report_level, halt_level, stream=None, debug=0, encoding='ascii', error_handler='replace'): self.messages = [] - Reporter.__init__(self, source, report_level, halt_level, stream, + super().__init__(source, report_level, halt_level, stream, debug, encoding, error_handler) def system_message(self, level, message, *children, **kwargs): @@ -96,19 +98,39 @@ class check(Command): if missing: self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: + if not ( + self._check_contact("author", metadata) or + self._check_contact("maintainer", metadata) + ): self.warn("missing meta-data: either (author and author_email) " + "or (maintainer and maintainer_email) " + "should be supplied") + def _check_contact(self, kind, metadata): + """ + Returns True if the contact's name is specified and False otherwise. + This function will warn if the contact's email is not specified. + """ + name = getattr(metadata, kind) or '' + email = getattr(metadata, kind + '_email') or '' + + msg = ("missing meta-data: if '{}' supplied, " + + "'{}' should be supplied too") + + if name and email: + return True + + if name: + self.warn(msg.format(kind, kind + '_email')) + return True + + addresses = [(alias, addr) for alias, addr in getaddresses([email])] + if any(alias and addr for alias, addr in addresses): + # The contact's name can be encoded in the email: `Name <email>` + return True + + return False + def check_restructuredtext(self): """Checks if the long string fields are reST-compliant.""" data = self.distribution.get_long_description() diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py index c756b6db..41c17d8a 100644 --- a/setuptools/_distutils/command/install.py +++ b/setuptools/_distutils/command/install.py @@ -17,6 +17,7 @@ from distutils.file_util import write_file from distutils.util import convert_path, subst_vars, change_root from distutils.util import get_platform from distutils.errors import DistutilsOptionError +from .. import _collections from site import USER_BASE from site import USER_SITE @@ -67,8 +68,8 @@ if HAS_USER_SITE: INSTALL_SCHEMES['nt_user'] = { 'purelib': '{usersite}', 'platlib': '{usersite}', - 'headers': '{userbase}/{implementation}{py_version_nodot}/Include/{dist_name}', - 'scripts': '{userbase}/{implementation}{py_version_nodot}/Scripts', + 'headers': '{userbase}/{implementation}{py_version_nodot_plat}/Include/{dist_name}', + 'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts', 'data' : '{userbase}', } @@ -118,6 +119,65 @@ def _get_implementation(): return 'Python' +def _select_scheme(ob, name): + scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name))) + vars(ob).update(_remove_set(ob, _scheme_attrs(scheme))) + + +def _remove_set(ob, attrs): + """ + Include only attrs that are None in ob. 
+ """ + return { + key: value + for key, value in attrs.items() + if getattr(ob, key) is None + } + + +def _resolve_scheme(name): + os_name, sep, key = name.partition('_') + try: + resolved = sysconfig.get_preferred_scheme(key) + except Exception: + resolved = _pypy_hack(name) + return resolved + + +def _load_scheme(name): + return _load_schemes()[name] + + +def _inject_headers(name, scheme): + """ + Given a scheme name and the resolved scheme, + if the scheme does not include headers, resolve + the fallback scheme for the name and use headers + from it. pypa/distutils#88 + """ + # Bypass the preferred scheme, which may not + # have defined headers. + fallback = _load_scheme(_pypy_hack(name)) + scheme.setdefault('headers', fallback['headers']) + return scheme + + +def _scheme_attrs(scheme): + """Resolve install directories by applying the install schemes.""" + return { + f'install_{key}': scheme[key] + for key in SCHEME_KEYS + } + + +def _pypy_hack(name): + PY37 = sys.version_info < (3, 8) + old_pypy = hasattr(sys, 'pypy_version_info') and PY37 + prefix = not name.endswith(('_user', '_home')) + pypy_name = 'pypy' + '_nt' * (os.name == 'nt') + return pypy_name if old_pypy and prefix else name + + class install(Command): description = "install everything from build directory" @@ -335,25 +395,35 @@ class install(Command): except AttributeError: # sys.abiflags may not be defined on all platforms. abiflags = '' - self.config_vars = {'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': '%d.%d' % sys.version_info[:2], - 'py_version_nodot': '%d%d' % sys.version_info[:2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - 'abiflags': abiflags, - 'platlibdir': getattr(sys, 'platlibdir', 'lib'), - 'implementation_lower': _get_implementation().lower(), - 'implementation': _get_implementation(), - } + local_vars = { + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': '%d.%d' % sys.version_info[:2], + 'py_version_nodot': '%d%d' % sys.version_info[:2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + 'abiflags': abiflags, + 'platlibdir': getattr(sys, 'platlibdir', 'lib'), + 'implementation_lower': _get_implementation().lower(), + 'implementation': _get_implementation(), + } + + # vars for compatibility on older Pythons + compat_vars = dict( + # Python 3.9 and earlier + py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''), + ) if HAS_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite + local_vars['userbase'] = self.install_userbase + local_vars['usersite'] = self.install_usersite + + self.config_vars = _collections.DictStack( + [compat_vars, sysconfig.get_config_vars(), local_vars]) self.expand_basedirs() @@ -361,15 +431,13 @@ class install(Command): # Now define config vars for the base directories so we can expand # everything else. 
- self.config_vars['base'] = self.install_base - self.config_vars['platbase'] = self.install_platbase - self.config_vars['installed_base'] = ( - sysconfig.get_config_vars()['installed_base']) + local_vars['base'] = self.install_base + local_vars['platbase'] = self.install_platbase if DEBUG: from pprint import pprint print("config vars:") - pprint(self.config_vars) + pprint(dict(self.config_vars)) # Expand "~" and configuration variables in the installation # directories. @@ -445,12 +513,17 @@ class install(Command): def finalize_unix(self): """Finalizes options for posix platforms.""" if self.install_base is not None or self.install_platbase is not None: - if ((self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None) or + incomplete_scheme = ( + ( + self.install_lib is None and + self.install_purelib is None and + self.install_platlib is None + ) or self.install_headers is None or self.install_scripts is None or - self.install_data is None): + self.install_data is None + ) + if incomplete_scheme: raise DistutilsOptionError( "install-base or install-platbase supplied, but " "installation scheme is incomplete") @@ -471,8 +544,13 @@ class install(Command): raise DistutilsOptionError( "must not supply exec-prefix without prefix") - self.prefix = os.path.normpath(sys.prefix) - self.exec_prefix = os.path.normpath(sys.exec_prefix) + # Allow Fedora to add components to the prefix + _prefix_addition = getattr(sysconfig, '_prefix_addition', "") + + self.prefix = ( + os.path.normpath(sys.prefix) + _prefix_addition) + self.exec_prefix = ( + os.path.normpath(sys.exec_prefix) + _prefix_addition) else: if self.exec_prefix is None: @@ -505,20 +583,7 @@ class install(Command): "I don't know how to install stuff on '%s'" % os.name) def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! - if (hasattr(sys, 'pypy_version_info') and - sys.version_info < (3, 8) and - not name.endswith(('_user', '_home'))): - if os.name == 'nt': - name = 'pypy_nt' - else: - name = 'pypy' - scheme = _load_schemes()[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) + _select_scheme(self, name) def _expand_attrs(self, attrs): for attr in attrs: @@ -592,7 +657,7 @@ class install(Command): return home = convert_path(os.path.expanduser("~")) for name, path in self.config_vars.items(): - if path.startswith(home) and not os.path.isdir(path): + if str(path).startswith(home) and not os.path.isdir(path): self.debug_print("os.makedirs('%s', 0o700)" % path) os.makedirs(path, 0o700) diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py index f80ca622..c5c86d8f 100644 --- a/setuptools/_distutils/cygwinccompiler.py +++ b/setuptools/_distutils/cygwinccompiler.py @@ -50,15 +50,15 @@ cygwin in no-cygwin mode). 
import os import sys import copy -from subprocess import Popen, PIPE, check_output -import re +import shlex +import warnings +from subprocess import check_output from distutils.unixccompiler import UnixCCompiler from distutils.file_util import write_file from distutils.errors import (DistutilsExecError, CCompilerError, CompileError, UnknownFileError) -from distutils.version import LooseVersion -from distutils.spawn import find_executable +from distutils.version import LooseVersion, suppress_known_deprecation def get_msvcr(): """Include the appropriate MSVC runtime library if Python was built @@ -108,7 +108,7 @@ class CygwinCCompiler(UnixCCompiler): def __init__(self, verbose=0, dry_run=0, force=0): - UnixCCompiler.__init__(self, verbose, dry_run, force) + super().__init__(verbose, dry_run, force) status, details = check_config_h() self.debug_print("Python's GCC status: %s (details: %s)" % @@ -123,33 +123,8 @@ class CygwinCCompiler(UnixCCompiler): self.cc = os.environ.get('CC', 'gcc') self.cxx = os.environ.get('CXX', 'g++') - if ('gcc' in self.cc): # Start gcc workaround - self.gcc_version, self.ld_version, self.dllwrap_version = \ - get_versions() - self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" % - (self.gcc_version, - self.ld_version, - self.dllwrap_version) ) - - # ld_version >= "2.10.90" and < "2.13" should also be able to use - # gcc -mdll instead of dllwrap - # Older dllwraps had own version numbers, newer ones use the - # same as the rest of binutils ( also ld ) - # dllwrap 2.10.90 is buggy - if self.ld_version >= "2.10.90": - self.linker_dll = self.cc - else: - self.linker_dll = "dllwrap" - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - else: # Assume linker is up to date - self.linker_dll = self.cc - shared_option = "-shared" + self.linker_dll = self.cc + shared_option = "-shared" self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc, compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, @@ -158,17 +133,24 @@ class CygwinCCompiler(UnixCCompiler): linker_so=('%s -mcygwin %s' % (self.linker_dll, shared_option))) - # cygwin and mingw32 need different sets of libraries - if ('gcc' in self.cc and self.gcc_version == "2.91.57"): - # cygwin shouldn't need msvcrt, but without the dlls will crash - # (gcc version 2.91.57) -- perhaps something about initialization - self.dll_libraries=["msvcrt"] - self.warn( - "Consider upgrading to a newer version of gcc") - else: - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() + # Include the appropriate MSVC runtime library if Python was built + # with MSVC 7.0 or later. + self.dll_libraries = get_msvcr() + + @property + def gcc_version(self): + # Older numpy dependend on this existing to check for ancient + # gcc versions. This doesn't make much sense with clang etc so + # just hardcode to something recent. + # https://github.com/numpy/numpy/pull/20333 + warnings.warn( + "gcc_version attribute of CygwinCCompiler is deprecated. 
" + "Instead of returning actual gcc version a fixed value 11.2.0 is returned.", + DeprecationWarning, + stacklevel=2, + ) + with suppress_known_deprecation(): + return LooseVersion("11.2.0") def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): """Compiles the source by spawning GCC and windres if needed.""" @@ -230,24 +212,17 @@ class CygwinCCompiler(UnixCCompiler): # next add options for def-file and to creating import libraries - # dllwrap uses different options than gcc/ld - if self.linker_dll == "dllwrap": - extra_preargs.extend(["--output-lib", lib_file]) - # for dllwrap we have to use a special option - extra_preargs.extend(["--def", def_file]) - # we use gcc/ld here and can be sure ld is >= 2.9.10 - else: - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) - # for gcc/ld the def-file is specified as any object files - objects.append(def_file) + # doesn't work: bfd_close build\...\libfoo.a: Invalid operation + #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) + # for gcc/ld the def-file is specified as any object files + objects.append(def_file) #end: if ((export_symbols is not None) and # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): # who wants symbols and a many times larger output file # should explicitly switch the debug mode on - # otherwise we let dllwrap/ld strip the output file + # otherwise we let ld strip the output file # (On my machine: 10KiB < stripped_file < ??100KiB # unstripped_file = stripped_file + XXX KiB # ( XXX=254 for a typical python extension)) @@ -293,21 +268,9 @@ class Mingw32CCompiler(CygwinCCompiler): def __init__(self, verbose=0, dry_run=0, force=0): - CygwinCCompiler.__init__ (self, verbose, dry_run, force) - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if ('gcc' in self.cc and self.ld_version < "2.13"): - shared_option = "-mdll -static" - else: - shared_option = "-shared" + super().__init__ (verbose, dry_run, force) - # A real mingw32 doesn't need to specify a different entry point, - # but cygwin 2.91.57 in no-cygwin-mode needs it. - if ('gcc' in self.cc and self.gcc_version <= "2.91.57"): - entry_point = '--entry _DllMain@12' - else: - entry_point = '' + shared_option = "-shared" if is_cygwincc(self.cc): raise CCompilerError( @@ -317,9 +280,9 @@ class Mingw32CCompiler(CygwinCCompiler): compiler_so='%s -mdll -O -Wall' % self.cc, compiler_cxx='%s -O -Wall' % self.cxx, linker_exe='%s' % self.cc, - linker_so='%s %s %s' - % (self.linker_dll, shared_option, - entry_point)) + linker_so='%s %s' + % (self.linker_dll, shared_option)) + # Maybe we should also append -mthreads, but then the finished # dlls need another dll (mingwm10.dll see Mingw32 docs) # (-mthreads: Support thread-safe exception handling on `Mingw32') @@ -386,38 +349,14 @@ def check_config_h(): return (CONFIG_H_UNCERTAIN, "couldn't read '%s': %s" % (fn, exc.strerror)) -RE_VERSION = re.compile(br'(\d+\.\d+(\.\d+)*)') - -def _find_exe_version(cmd): - """Find the version of an executable by running `cmd` in the shell. - - If the command is not found, or the output does not match - `RE_VERSION`, returns None. 
- """ - executable = cmd.split()[0] - if find_executable(executable) is None: - return None - out = Popen(cmd, shell=True, stdout=PIPE).stdout - try: - out_string = out.read() - finally: - out.close() - result = RE_VERSION.search(out_string) - if result is None: - return None - # LooseVersion works with strings - # so we need to decode our bytes - return LooseVersion(result.group(1).decode()) - -def get_versions(): - """ Try to find out the versions of gcc, ld and dllwrap. - - If not possible it returns None for it. - """ - commands = ['gcc -dumpversion', 'ld -v', 'dllwrap --version'] - return tuple([_find_exe_version(cmd) for cmd in commands]) - def is_cygwincc(cc): '''Try to determine if the compiler that would be used is from cygwin.''' - out_string = check_output([cc, '-dumpmachine']) + out_string = check_output(shlex.split(cc) + ['-dumpmachine']) return out_string.strip().endswith(b'cygwin') + + +get_versions = None +""" +A stand-in for the previous get_versions() function to prevent failures +when monkeypatched. See pypa/setuptools#2969. +""" diff --git a/setuptools/_distutils/log.py b/setuptools/_distutils/log.py index 8ef6b28e..a68b156b 100644 --- a/setuptools/_distutils/log.py +++ b/setuptools/_distutils/log.py @@ -3,13 +3,14 @@ # The class here is styled after PEP 282 so that it could later be # replaced with a standard Python logging implementation. +import sys + DEBUG = 1 INFO = 2 WARN = 3 ERROR = 4 FATAL = 5 -import sys class Log: @@ -54,6 +55,7 @@ class Log: def fatal(self, msg, *args): self._log(FATAL, msg, args) + _global_log = Log() log = _global_log.log debug = _global_log.debug @@ -62,12 +64,14 @@ warn = _global_log.warn error = _global_log.error fatal = _global_log.fatal + def set_threshold(level): # return the old threshold for use from tests old = _global_log.threshold _global_log.threshold = level return old + def set_verbosity(v): if v <= 0: set_threshold(WARN) diff --git a/setuptools/_distutils/msvc9compiler.py b/setuptools/_distutils/msvc9compiler.py index a1b3b02f..6b627383 100644 --- a/setuptools/_distutils/msvc9compiler.py +++ b/setuptools/_distutils/msvc9compiler.py @@ -291,8 +291,6 @@ def query_vcvarsall(version, arch="x86"): # More globals VERSION = get_build_version() -if VERSION < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) # MACROS = MacroExpander(VERSION) class MSVCCompiler(CCompiler) : @@ -326,7 +324,7 @@ class MSVCCompiler(CCompiler) : exe_extension = '.exe' def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + super().__init__(verbose, dry_run, force) self.__version = VERSION self.__root = r"Software\Microsoft\VisualStudio" # self.__macros = MACROS @@ -339,6 +337,8 @@ class MSVCCompiler(CCompiler) : def initialize(self, plat_name=None): # multi-init means we would need to check platform same each time... assert not self.initialized, "don't init multiple times" + if self.__version < 8.0: + raise DistutilsPlatformError("VC %0.1f is not supported by this module" % self.__version) if plat_name is None: plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. 
diff --git a/setuptools/_distutils/msvccompiler.py b/setuptools/_distutils/msvccompiler.py index 2d447b85..e1367b89 100644 --- a/setuptools/_distutils/msvccompiler.py +++ b/setuptools/_distutils/msvccompiler.py @@ -228,7 +228,7 @@ class MSVCCompiler(CCompiler) : exe_extension = '.exe' def __init__(self, verbose=0, dry_run=0, force=0): - CCompiler.__init__ (self, verbose, dry_run, force) + super().__init__(verbose, dry_run, force) self.__version = get_build_version() self.__arch = get_build_architecture() if self.__arch == "Intel": diff --git a/setuptools/_distutils/py39compat.py b/setuptools/_distutils/py39compat.py new file mode 100644 index 00000000..9de95013 --- /dev/null +++ b/setuptools/_distutils/py39compat.py @@ -0,0 +1,21 @@ +import sys +import platform + + +def add_ext_suffix_39(vars): + """ + Ensure vars contains 'EXT_SUFFIX'. pypa/distutils#130 + """ + import _imp + ext_suffix = _imp.extension_suffixes()[0] + vars.update( + EXT_SUFFIX=ext_suffix, + # sysconfig sets SO to match EXT_SUFFIX, so maintain + # that expectation. + # https://github.com/python/cpython/blob/785cc6770588de087d09e89a69110af2542be208/Lib/sysconfig.py#L671-L673 + SO=ext_suffix, + ) + + +needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows' +add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None diff --git a/setuptools/_distutils/spawn.py b/setuptools/_distutils/spawn.py index 6e1c89f1..b2d10e39 100644 --- a/setuptools/_distutils/spawn.py +++ b/setuptools/_distutils/spawn.py @@ -10,7 +10,7 @@ import sys import os import subprocess -from distutils.errors import DistutilsPlatformError, DistutilsExecError +from distutils.errors import DistutilsExecError from distutils.debug import DEBUG from distutils import log diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py index d36d94f7..55a42e16 100644 --- a/setuptools/_distutils/sysconfig.py +++ b/setuptools/_distutils/sysconfig.py @@ -9,12 +9,13 @@ Written by: Fred L. Drake, Jr. Email: <fdrake@acm.org> """ -import _imp import os import re import sys +import sysconfig from .errors import DistutilsPlatformError +from . import py39compat IS_PYPY = '__pypy__' in sys.builtin_module_names @@ -47,6 +48,7 @@ def _is_python_source_dir(d): return True return False + _sys_home = getattr(sys, '_home', None) if os.name == 'nt': @@ -58,11 +60,13 @@ if os.name == 'nt': project_base = _fix_pcbuild(project_base) _sys_home = _fix_pcbuild(_sys_home) + def _python_build(): if _sys_home: return _is_python_source_dir(_sys_home) return _is_python_source_dir(project_base) + python_build = _python_build() @@ -78,6 +82,7 @@ except AttributeError: # this attribute, which is fine. pass + def get_python_version(): """Return a string containing the major and minor Python version, leaving off the patchlevel. Sample return values could be '1.5' @@ -191,7 +196,6 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): "on platform '%s'" % os.name) - def customize_compiler(compiler): """Do any platform-specific customization of a CCompiler instance. 
@@ -216,8 +220,9 @@ def customize_compiler(compiler): _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars('CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') + get_config_vars( + 'CC', 'CXX', 'CFLAGS', + 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') if 'CC' in os.environ: newcc = os.environ['CC'] @@ -274,31 +279,14 @@ def get_config_h_filename(): inc_dir = os.path.join(_sys_home or project_base, "PC") else: inc_dir = _sys_home or project_base + return os.path.join(inc_dir, 'pyconfig.h') else: - inc_dir = get_python_inc(plat_specific=1) - - return os.path.join(inc_dir, 'pyconfig.h') - - -# Allow this value to be patched by pkgsrc. Ref pypa/distutils#16. -_makefile_tmpl = 'config-{python_ver}{build_flags}{multiarch}' + return sysconfig.get_config_h_filename() def get_makefile_filename(): """Return full pathname of installed Makefile from the Python build.""" - if python_build: - return os.path.join(_sys_home or project_base, "Makefile") - lib_dir = get_python_lib(plat_specific=0, standard_lib=1) - multiarch = ( - '-%s' % sys.implementation._multiarch - if hasattr(sys.implementation, '_multiarch') else '' - ) - config_file = _makefile_tmpl.format( - python_ver=get_python_version(), - build_flags=build_flags, - multiarch=multiarch, - ) - return os.path.join(lib_dir, config_file, 'Makefile') + return sysconfig.get_makefile_filename() def parse_config_h(fp, g=None): @@ -308,26 +296,7 @@ def parse_config_h(fp, g=None): optional dictionary is passed in as the second argument, it is used instead of a new dictionary. """ - if g is None: - g = {} - define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") - undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") - # - while True: - line = fp.readline() - if not line: - break - m = define_rx.match(line) - if m: - n, v = m.group(1, 2) - try: v = int(v) - except ValueError: pass - g[n] = v - else: - m = undef_rx.match(line) - if m: - g[m.group(1)] = 0 - return g + return sysconfig.parse_config_h(fp, vars=g) # Regexes needed for parsing Makefile (and similar syntaxes, @@ -336,6 +305,7 @@ _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + def parse_makefile(fn, g=None): """Parse a Makefile-style file. @@ -344,7 +314,9 @@ def parse_makefile(fn, g=None): used instead of a new dictionary. 
""" from distutils.text_file import TextFile - fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape") + fp = TextFile( + fn, strip_comments=1, skip_blanks=1, join_lines=1, + errors="surrogateescape") if g is None: g = {} @@ -353,7 +325,7 @@ def parse_makefile(fn, g=None): while True: line = fp.readline() - if line is None: # eof + if line is None: # eof break m = _variable_rx.match(line) if m: @@ -397,7 +369,8 @@ def parse_makefile(fn, g=None): item = os.environ[n] elif n in renamed_variables: - if name.startswith('PY_') and name[3:] in renamed_variables: + if name.startswith('PY_') and \ + name[3:] in renamed_variables: item = "" elif 'PY_' + n in notdone: @@ -413,7 +386,8 @@ def parse_makefile(fn, g=None): if "$" in after: notdone[name] = value else: - try: value = int(value) + try: + value = int(value) except ValueError: done[name] = value.strip() else: @@ -421,7 +395,7 @@ def parse_makefile(fn, g=None): del notdone[name] if name.startswith('PY_') \ - and name[3:] in renamed_variables: + and name[3:] in renamed_variables: name = name[3:] if name not in done: @@ -470,51 +444,6 @@ def expand_makefile_vars(s, vars): _config_vars = None -_sysconfig_name_tmpl = '_sysconfigdata_{abi}_{platform}_{multiarch}' - - -def _init_posix(): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see the sysconfig module - name = os.environ.get( - '_PYTHON_SYSCONFIGDATA_NAME', - _sysconfig_name_tmpl.format( - abi=sys.abiflags, - platform=sys.platform, - multiarch=getattr(sys.implementation, '_multiarch', ''), - ), - ) - try: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - except ImportError: - # Python 3.5 and pypy 7.3.1 - _temp = __import__( - '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars - global _config_vars - _config_vars = {} - _config_vars.update(build_time_vars) - - -def _init_nt(): - """Initialize the module as appropriate for NT""" - g = {} - # set basic install directories - g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) - g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) - - # XXX hmmm.. a normal install puts include files here - g['INCLUDEPY'] = get_python_inc(plat_specific=0) - - g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] - g['EXE'] = ".exe" - g['VERSION'] = get_python_version().replace(".", "") - g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) - - global _config_vars - _config_vars = g - - def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. Generally this includes @@ -527,60 +456,8 @@ def get_config_vars(*args): """ global _config_vars if _config_vars is None: - func = globals().get("_init_" + os.name) - if func: - func() - else: - _config_vars = {} - - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # Distutils. - _config_vars['prefix'] = PREFIX - _config_vars['exec_prefix'] = EXEC_PREFIX - - if not IS_PYPY: - # For backward compatibility, see issue19555 - SO = _config_vars.get('EXT_SUFFIX') - if SO is not None: - _config_vars['SO'] = SO - - # Always convert srcdir to an absolute path - srcdir = _config_vars.get('srcdir', project_base) - if os.name == 'posix': - if python_build: - # If srcdir is a relative path (typically '.' 
or '..') - # then it should be interpreted relative to the directory - # containing Makefile. - base = os.path.dirname(get_makefile_filename()) - srcdir = os.path.join(base, srcdir) - else: - # srcdir is not meaningful since the installation is - # spread about the filesystem. We choose the - # directory containing the Makefile since we know it - # exists. - srcdir = os.path.dirname(get_makefile_filename()) - _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir)) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. - if python_build and os.name == "posix": - base = project_base - if (not os.path.isabs(_config_vars['srcdir']) and - base != os.getcwd()): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _config_vars['srcdir']) - _config_vars['srcdir'] = os.path.normpath(srcdir) - - # OS X platforms require special customization to handle - # multi-architecture, multi-os-version installers - if sys.platform == 'darwin': - import _osx_support - _osx_support.customize_config_vars(_config_vars) + _config_vars = sysconfig.get_config_vars().copy() + py39compat.add_ext_suffix(_config_vars) if args: vals = [] @@ -590,6 +467,7 @@ def get_config_vars(*args): else: return _config_vars + def get_config_var(name): """Return the value of a single variable using the dictionary returned by 'get_config_vars()'. Equivalent to @@ -597,5 +475,6 @@ def get_config_var(name): """ if name == 'SO': import warnings - warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) + warnings.warn( + 'SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) return get_config_vars().get(name) diff --git a/setuptools/_distutils/tests/py38compat.py b/setuptools/_distutils/tests/py38compat.py index 32269c7b..c949f58e 100644 --- a/setuptools/_distutils/tests/py38compat.py +++ b/setuptools/_distutils/tests/py38compat.py @@ -2,6 +2,11 @@ import contextlib import builtins +import sys + +from test.support import requires_zlib +import test.support + ModuleNotFoundError = getattr(builtins, 'ModuleNotFoundError', ImportError) @@ -51,3 +56,7 @@ try: from test.support.warnings_helper import save_restore_warnings_filters except (ModuleNotFoundError, ImportError): save_restore_warnings_filters = _save_restore_warnings_filters + + +if sys.version_info < (3, 9): + requires_zlib = lambda: test.support.requires_zlib diff --git a/setuptools/_distutils/tests/test_archive_util.py b/setuptools/_distutils/tests/test_archive_util.py index ce6456dc..800b9018 100644 --- a/setuptools/_distutils/tests/test_archive_util.py +++ b/setuptools/_distutils/tests/test_archive_util.py @@ -14,16 +14,11 @@ from distutils.archive_util import (check_archive_formats, make_tarball, from distutils.spawn import find_executable, spawn from distutils.tests import support from test.support import run_unittest, patch +from .unix_compat import require_unix_id, require_uid_0, grp, pwd, UID_0_SUPPORT from .py38compat import change_cwd from .py38compat import check_warnings -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False try: import zipfile @@ -339,7 +334,7 @@ class ArchiveUtilTestCase(support.TempdirManager, def test_make_archive_owner_group(self): # testing make_archive with 
owner and group, with various combinations # this works even if there's not gid/uid support - if UID_GID_SUPPORT: + if UID_0_SUPPORT: group = grp.getgrgid(0)[0] owner = pwd.getpwuid(0)[0] else: @@ -364,7 +359,8 @@ class ArchiveUtilTestCase(support.TempdirManager, self.assertTrue(os.path.exists(res)) @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") + @require_unix_id + @require_uid_0 def test_tarfile_root_owner(self): tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') @@ -391,7 +387,7 @@ class ArchiveUtilTestCase(support.TempdirManager, archive.close() def test_suite(): - return unittest.makeSuite(ArchiveUtilTestCase) + return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist.py b/setuptools/_distutils/tests/test_bdist.py index 130d8bf1..8b7498e3 100644 --- a/setuptools/_distutils/tests/test_bdist.py +++ b/setuptools/_distutils/tests/test_bdist.py @@ -51,7 +51,7 @@ class BuildTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(BuildTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_dumb.py b/setuptools/_distutils/tests/test_bdist_dumb.py index 01a233bc..bb860c8a 100644 --- a/setuptools/_distutils/tests/test_bdist_dumb.py +++ b/setuptools/_distutils/tests/test_bdist_dumb.py @@ -91,7 +91,7 @@ class BuildDumbTestCase(support.TempdirManager, self.assertEqual(contents, sorted(wanted)) def test_suite(): - return unittest.makeSuite(BuildDumbTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_msi.py b/setuptools/_distutils/tests/test_bdist_msi.py index 937266f8..b1831ef2 100644 --- a/setuptools/_distutils/tests/test_bdist_msi.py +++ b/setuptools/_distutils/tests/test_bdist_msi.py @@ -22,7 +22,7 @@ class BDistMSITestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(BDistMSITestCase) + return unittest.TestLoader().loadTestsFromTestCase(BDistMSITestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_rpm.py b/setuptools/_distutils/tests/test_bdist_rpm.py index 6453a02b..08a7cb46 100644 --- a/setuptools/_distutils/tests/test_bdist_rpm.py +++ b/setuptools/_distutils/tests/test_bdist_rpm.py @@ -3,13 +3,16 @@ import unittest import sys import os -from test.support import run_unittest, requires_zlib +from test.support import run_unittest from distutils.core import Distribution from distutils.command.bdist_rpm import bdist_rpm from distutils.tests import support from distutils.spawn import find_executable +from .py38compat import requires_zlib + + SETUP_PY = """\ from distutils.core import setup import foo @@ -44,7 +47,7 @@ class BuildRpmTestCase(support.TempdirManager, # spurious sdtout/stderr output under Mac OS X @unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') @unittest.skipIf(find_executable('rpmbuild') is None, @@ -87,7 +90,7 @@ class BuildRpmTestCase(support.TempdirManager, # spurious sdtout/stderr output under Mac OS X 
@unittest.skipUnless(sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X') - @requires_zlib + @requires_zlib() # http://bugs.python.org/issue1533164 @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') @@ -129,7 +132,7 @@ class BuildRpmTestCase(support.TempdirManager, os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) def test_suite(): - return unittest.makeSuite(BuildRpmTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_wininst.py b/setuptools/_distutils/tests/test_bdist_wininst.py index 31cf2628..59f25167 100644 --- a/setuptools/_distutils/tests/test_bdist_wininst.py +++ b/setuptools/_distutils/tests/test_bdist_wininst.py @@ -34,7 +34,7 @@ class BuildWinInstTestCase(support.TempdirManager, self.assertGreater(len(exe_file), 10) def test_suite(): - return unittest.makeSuite(BuildWinInstTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildWinInstTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build.py b/setuptools/_distutils/tests/test_build.py index b020a5ba..93724419 100644 --- a/setuptools/_distutils/tests/test_build.py +++ b/setuptools/_distutils/tests/test_build.py @@ -24,10 +24,10 @@ class BuildTestCase(support.TempdirManager, wanted = os.path.join(cmd.build_base, 'lib') self.assertEqual(cmd.build_purelib, wanted) - # build_platlib is 'build/lib.platform-x.x[-pydebug]' + # build_platlib is 'build/lib.platform-cache_tag[-pydebug]' # examples: - # build/lib.macosx-10.3-i386-2.7 - plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2]) + # build/lib.macosx-10.3-i386-cpython39 + plat_spec = '.%s-%s' % (cmd.plat_name, sys.implementation.cache_tag) if hasattr(sys, 'gettotalrefcount'): self.assertTrue(cmd.build_platlib.endswith('-pydebug')) plat_spec += '-pydebug' @@ -50,7 +50,7 @@ class BuildTestCase(support.TempdirManager, self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) def test_suite(): - return unittest.makeSuite(BuildTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_clib.py b/setuptools/_distutils/tests/test_build_clib.py index 259c4352..d50ead7c 100644 --- a/setuptools/_distutils/tests/test_build_clib.py +++ b/setuptools/_distutils/tests/test_build_clib.py @@ -130,7 +130,7 @@ class BuildCLibTestCase(support.TempdirManager, self.assertIn('libfoo.a', os.listdir(build_temp)) def test_suite(): - return unittest.makeSuite(BuildCLibTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_ext.py b/setuptools/_distutils/tests/test_build_ext.py index 85ecf4b7..920e4dc8 100644 --- a/setuptools/_distutils/tests/test_build_ext.py +++ b/setuptools/_distutils/tests/test_build_ext.py @@ -493,12 +493,16 @@ class BuildExtTestCase(TempdirManager, # format the target value as defined in the Apple # Availability Macros. We can't use the macro names since # at least one value we test with will not exist yet. 
- if target[1] < 10: + if target[:2] < (10, 10): # for 10.1 through 10.9.x -> "10n0" target = '%02d%01d0' % target else: # for 10.10 and beyond -> "10nn00" - target = '%02d%02d00' % target + if len(target) >= 2: + target = '%02d%02d00' % target + else: + # 11 and later can have no minor version (11 instead of 11.0) + target = '%02d0000' % target deptarget_ext = Extension( 'deptarget', [deptarget_c], @@ -538,8 +542,8 @@ class ParallelBuildExtTestCase(BuildExtTestCase): def test_suite(): suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(BuildExtTestCase)) - suite.addTest(unittest.makeSuite(ParallelBuildExtTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) return suite if __name__ == '__main__': diff --git a/setuptools/_distutils/tests/test_build_py.py b/setuptools/_distutils/tests/test_build_py.py index 0712e92c..a590a485 100644 --- a/setuptools/_distutils/tests/test_build_py.py +++ b/setuptools/_distutils/tests/test_build_py.py @@ -173,7 +173,7 @@ class BuildPyTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(BuildPyTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_scripts.py b/setuptools/_distutils/tests/test_build_scripts.py index 954fc763..f299e51e 100644 --- a/setuptools/_distutils/tests/test_build_scripts.py +++ b/setuptools/_distutils/tests/test_build_scripts.py @@ -106,7 +106,7 @@ class BuildScriptsTestCase(support.TempdirManager, self.assertIn(name, built) def test_suite(): - return unittest.makeSuite(BuildScriptsTestCase) + return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py index e534aca1..b41dba3d 100644 --- a/setuptools/_distutils/tests/test_check.py +++ b/setuptools/_distutils/tests/test_check.py @@ -71,6 +71,28 @@ class CheckTestCase(support.LoggingSilencer, cmd = self._run(metadata) self.assertEqual(cmd._warnings, 0) + def test_check_author_maintainer(self): + for kind in ("author", "maintainer"): + # ensure no warning when author_email or maintainer_email is given + # (the spec allows these fields to take the form "Name <email>") + metadata = {'url': 'xxx', + kind + '_email': 'Name <name@email.com>', + 'name': 'xxx', 'version': 'xxx'} + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 0) + + # the check should warn if only email is given and it does not + # contain the name + metadata[kind + '_email'] = 'name@email.com' + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 1) + + # the check should warn if only the name is given + metadata[kind] = "Name" + del metadata[kind + '_email'] + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 1) + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") def test_check_document(self): pkg_info, dist = self.create_dist() @@ -157,7 +179,7 @@ class CheckTestCase(support.LoggingSilencer, 'restructuredtext': 1}) def test_suite(): - return unittest.makeSuite(CheckTestCase) + return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_clean.py b/setuptools/_distutils/tests/test_clean.py index c605afd8..92367499 100644 --- 
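The deptarget hunk above reworks how a parsed MACOSX_DEPLOYMENT_TARGET tuple is rendered in Availability-Macro style, covering macOS 11 and later where the minor version may be absent. The same logic as a standalone sketch; the helper name is illustrative:

def format_deptarget(target):
    # target is a tuple such as (10, 9), (10, 15) or (11,)
    if target[:2] < (10, 10):
        return '%02d%01d0' % target    # 10.1-10.9 -> '10n0'
    if len(target) >= 2:
        return '%02d%02d00' % target   # 10.10+    -> '10nn00'
    return '%02d0000' % target         # 11+ may have no minor version

assert format_deptarget((10, 9)) == '1090'
assert format_deptarget((10, 15)) == '101500'
assert format_deptarget((11,)) == '110000'
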
a/setuptools/_distutils/tests/test_clean.py +++ b/setuptools/_distutils/tests/test_clean.py @@ -43,7 +43,7 @@ class cleanTestCase(support.TempdirManager, cmd.run() def test_suite(): - return unittest.makeSuite(cleanTestCase) + return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_cmd.py b/setuptools/_distutils/tests/test_cmd.py index cf5197c3..2319214a 100644 --- a/setuptools/_distutils/tests/test_cmd.py +++ b/setuptools/_distutils/tests/test_cmd.py @@ -120,7 +120,7 @@ class CommandTestCase(unittest.TestCase): debug.DEBUG = False def test_suite(): - return unittest.makeSuite(CommandTestCase) + return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_config.py b/setuptools/_distutils/tests/test_config.py index 344084af..27bd9d44 100644 --- a/setuptools/_distutils/tests/test_config.py +++ b/setuptools/_distutils/tests/test_config.py @@ -66,7 +66,7 @@ class BasePyPIRCCommandTestCase(support.TempdirManager, class command(PyPIRCCommand): def __init__(self, dist): - PyPIRCCommand.__init__(self, dist) + super().__init__(dist) def initialize_options(self): pass finalize_options = initialize_options @@ -135,7 +135,7 @@ class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): def test_suite(): - return unittest.makeSuite(PyPIRCCommandTestCase) + return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_config_cmd.py b/setuptools/_distutils/tests/test_config_cmd.py index 4cd9a6b9..2c84719a 100644 --- a/setuptools/_distutils/tests/test_config_cmd.py +++ b/setuptools/_distutils/tests/test_config_cmd.py @@ -92,7 +92,7 @@ class ConfigTestCase(support.LoggingSilencer, self.assertFalse(os.path.exists(f)) def test_suite(): - return unittest.makeSuite(ConfigTestCase) + return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_core.py b/setuptools/_distutils/tests/test_core.py index d99cfd26..7270d699 100644 --- a/setuptools/_distutils/tests/test_core.py +++ b/setuptools/_distutils/tests/test_core.py @@ -159,7 +159,7 @@ class CoreTestCase(support.EnvironGuard, unittest.TestCase): self.assertEqual(stdout.readlines()[0], wanted) def test_suite(): - return unittest.makeSuite(CoreTestCase) + return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_cygwinccompiler.py b/setuptools/_distutils/tests/test_cygwinccompiler.py index 2a02eed4..8715a535 100644 --- a/setuptools/_distutils/tests/test_cygwinccompiler.py +++ b/setuptools/_distutils/tests/test_cygwinccompiler.py @@ -2,28 +2,14 @@ import unittest import sys import os -from io import BytesIO from test.support import run_unittest -from distutils import cygwinccompiler from distutils.cygwinccompiler import (check_config_h, CONFIG_H_OK, CONFIG_H_NOTOK, - CONFIG_H_UNCERTAIN, get_versions, + CONFIG_H_UNCERTAIN, get_msvcr) from distutils.tests import support -class FakePopen(object): - test_class = None - - def __init__(self, cmd, shell, stdout): - self.cmd = cmd.split()[0] - exes = self.test_class._exes - if self.cmd in exes: - # issue #6438 in Python 3.x, Popen returns bytes - self.stdout = BytesIO(exes[self.cmd]) - else: 
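The test_check_author_maintainer addition shown earlier pins down that a combined "Name <email>" value in author_email (or maintainer_email) satisfies the metadata check on its own. A hedged sketch mirroring that case; the metadata values are illustrative and the result assumes the patched check command from this diff:

from distutils.core import Distribution
from distutils.command.check import check

meta = {'name': 'pkg', 'version': '1.0', 'url': 'https://example.com',
        'author_email': 'Jane Doe <jane@example.com>'}
cmd = check(Distribution(meta))
cmd.check_metadata()
print(cmd._warnings)   # expected to be 0 with the patched check command
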
- self.stdout = os.popen(cmd, 'r') - class CygwinCCompilerTestCase(support.TempdirManager, unittest.TestCase): @@ -35,29 +21,16 @@ class CygwinCCompilerTestCase(support.TempdirManager, from distutils import sysconfig self.old_get_config_h_filename = sysconfig.get_config_h_filename sysconfig.get_config_h_filename = self._get_config_h_filename - self.old_find_executable = cygwinccompiler.find_executable - cygwinccompiler.find_executable = self._find_executable - self._exes = {} - self.old_popen = cygwinccompiler.Popen - FakePopen.test_class = self - cygwinccompiler.Popen = FakePopen def tearDown(self): sys.version = self.version from distutils import sysconfig sysconfig.get_config_h_filename = self.old_get_config_h_filename - cygwinccompiler.find_executable = self.old_find_executable - cygwinccompiler.Popen = self.old_popen super(CygwinCCompilerTestCase, self).tearDown() def _get_config_h_filename(self): return self.python_h - def _find_executable(self, name): - if name in self._exes: - return name - return None - def test_check_config_h(self): # check_config_h looks for "GCC" in sys.version first @@ -81,40 +54,6 @@ class CygwinCCompilerTestCase(support.TempdirManager, self.write_file(self.python_h, 'xxx __GNUC__ xxx') self.assertEqual(check_config_h()[0], CONFIG_H_OK) - def test_get_versions(self): - - # get_versions calls distutils.spawn.find_executable on - # 'gcc', 'ld' and 'dllwrap' - self.assertEqual(get_versions(), (None, None, None)) - - # Let's fake we have 'gcc' and it returns '3.4.5' - self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF' - res = get_versions() - self.assertEqual(str(res[0]), '3.4.5') - - # and let's see what happens when the version - # doesn't match the regular expression - # (\d+\.\d+(\.\d+)*) - self._exes['gcc'] = b'very strange output' - res = get_versions() - self.assertEqual(res[0], None) - - # same thing for ld - self._exes['ld'] = b'GNU ld version 2.17.50 20060824' - res = get_versions() - self.assertEqual(str(res[1]), '2.17.50') - self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77' - res = get_versions() - self.assertEqual(res[1], None) - - # and dllwrap - self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF' - res = get_versions() - self.assertEqual(str(res[2]), '2.17.50') - self._exes['dllwrap'] = b'Cheese Wrap' - res = get_versions() - self.assertEqual(res[2], None) - def test_get_msvcr(self): # none @@ -151,7 +90,7 @@ class CygwinCCompilerTestCase(support.TempdirManager, self.assertRaises(ValueError, get_msvcr) def test_suite(): - return unittest.makeSuite(CygwinCCompilerTestCase) + return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dep_util.py b/setuptools/_distutils/tests/test_dep_util.py index c6fae39c..0d52740a 100644 --- a/setuptools/_distutils/tests/test_dep_util.py +++ b/setuptools/_distutils/tests/test_dep_util.py @@ -74,7 +74,7 @@ class DepUtilTestCase(support.TempdirManager, unittest.TestCase): def test_suite(): - return unittest.makeSuite(DepUtilTestCase) + return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dir_util.py b/setuptools/_distutils/tests/test_dir_util.py index d436cf83..1b1f3bbb 100644 --- a/setuptools/_distutils/tests/test_dir_util.py +++ b/setuptools/_distutils/tests/test_dir_util.py @@ -133,7 +133,7 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): def 
test_suite(): - return unittest.makeSuite(DirUtilTestCase) + return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dist.py b/setuptools/_distutils/tests/test_dist.py index 45eadee8..36155be1 100644 --- a/setuptools/_distutils/tests/test_dist.py +++ b/setuptools/_distutils/tests/test_dist.py @@ -525,8 +525,8 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, def test_suite(): suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(DistributionTestCase)) - suite.addTest(unittest.makeSuite(MetadataTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase)) return suite if __name__ == "__main__": diff --git a/setuptools/_distutils/tests/test_extension.py b/setuptools/_distutils/tests/test_extension.py index 2eb5b422..78a55daa 100644 --- a/setuptools/_distutils/tests/test_extension.py +++ b/setuptools/_distutils/tests/test_extension.py @@ -65,7 +65,7 @@ class ExtensionTestCase(unittest.TestCase): "Unknown Extension options: 'chic'") def test_suite(): - return unittest.makeSuite(ExtensionTestCase) + return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_file_util.py b/setuptools/_distutils/tests/test_file_util.py index d2536075..81b90d6c 100644 --- a/setuptools/_distutils/tests/test_file_util.py +++ b/setuptools/_distutils/tests/test_file_util.py @@ -118,7 +118,7 @@ class FileUtilTestCase(support.TempdirManager, unittest.TestCase): def test_suite(): - return unittest.makeSuite(FileUtilTestCase) + return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_filelist.py b/setuptools/_distutils/tests/test_filelist.py index 9ec507b5..a90edcf1 100644 --- a/setuptools/_distutils/tests/test_filelist.py +++ b/setuptools/_distutils/tests/test_filelist.py @@ -344,8 +344,8 @@ class FindAllTestCase(unittest.TestCase): def test_suite(): return unittest.TestSuite([ - unittest.makeSuite(FileListTestCase), - unittest.makeSuite(FindAllTestCase), + unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), + unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), ]) diff --git a/setuptools/_distutils/tests/test_install.py b/setuptools/_distutils/tests/test_install.py index cce973dc..3aef9e43 100644 --- a/setuptools/_distutils/tests/test_install.py +++ b/setuptools/_distutils/tests/test_install.py @@ -56,14 +56,15 @@ class InstallTestCase(support.TempdirManager, expected = os.path.normpath(expected) self.assertEqual(got, expected) - libdir = os.path.join(destination, "lib", "python") + impl_name = sys.implementation.name.replace("cpython", "python") + libdir = os.path.join(destination, "lib", impl_name) check_path(cmd.install_lib, libdir) _platlibdir = getattr(sys, "platlibdir", "lib") - platlibdir = os.path.join(destination, _platlibdir, "python") + platlibdir = os.path.join(destination, _platlibdir, impl_name) check_path(cmd.install_platlib, platlibdir) check_path(cmd.install_purelib, libdir) check_path(cmd.install_headers, - os.path.join(destination, "include", "python", "foopkg")) + os.path.join(destination, "include", impl_name, "foopkg")) check_path(cmd.install_scripts, os.path.join(destination, "bin")) 
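The test_install changes above derive the expected directory names from sys.implementation.name, so the assertions hold on PyPy (which installs into .../lib/pypy) as well as CPython. A short sketch of the path computation; the destination prefix is illustrative:

import os
import sys

destination = '/tmp/dest'
impl_name = sys.implementation.name.replace("cpython", "python")
libdir = os.path.join(destination, "lib", impl_name)
print(libdir)   # /tmp/dest/lib/python on CPython, /tmp/dest/lib/pypy on PyPy
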
check_path(cmd.install_data, destination) @@ -81,7 +82,9 @@ class InstallTestCase(support.TempdirManager, install_module.USER_SITE = self.user_site def _expanduser(path): - return self.tmpdir + if path.startswith('~'): + return os.path.normpath(self.tmpdir + path[1:]) + return path self.old_expand = os.path.expanduser os.path.expanduser = _expanduser @@ -122,6 +125,17 @@ class InstallTestCase(support.TempdirManager, self.assertIn('userbase', cmd.config_vars) self.assertIn('usersite', cmd.config_vars) + actual_headers = os.path.relpath(cmd.install_headers, self.user_base) + if os.name == 'nt': + site_path = os.path.relpath( + os.path.dirname(self.old_user_site), self.old_user_base) + include = os.path.join(site_path, 'Include') + else: + include = sysconfig.get_python_inc(0, '') + expect_headers = os.path.join(include, 'xx') + + self.assertEqual(os.path.normcase(actual_headers), os.path.normcase(expect_headers)) + def test_handle_extra_path(self): dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) cmd = install(dist) @@ -244,7 +258,7 @@ class InstallTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(InstallTestCase) + return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_data.py b/setuptools/_distutils/tests/test_install_data.py index 32ab296a..6191d2fa 100644 --- a/setuptools/_distutils/tests/test_install_data.py +++ b/setuptools/_distutils/tests/test_install_data.py @@ -69,7 +69,7 @@ class InstallDataTestCase(support.TempdirManager, self.assertTrue(os.path.exists(os.path.join(inst, rone))) def test_suite(): - return unittest.makeSuite(InstallDataTestCase) + return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_headers.py b/setuptools/_distutils/tests/test_install_headers.py index 2217b321..1aa4d09c 100644 --- a/setuptools/_distutils/tests/test_install_headers.py +++ b/setuptools/_distutils/tests/test_install_headers.py @@ -33,7 +33,7 @@ class InstallHeadersTestCase(support.TempdirManager, self.assertEqual(len(cmd.get_outputs()), 2) def test_suite(): - return unittest.makeSuite(InstallHeadersTestCase) + return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_lib.py b/setuptools/_distutils/tests/test_install_lib.py index fda6315b..652653f2 100644 --- a/setuptools/_distutils/tests/test_install_lib.py +++ b/setuptools/_distutils/tests/test_install_lib.py @@ -109,7 +109,7 @@ class InstallLibTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(InstallLibTestCase) + return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_scripts.py b/setuptools/_distutils/tests/test_install_scripts.py index 1f7b1038..648db3b1 100644 --- a/setuptools/_distutils/tests/test_install_scripts.py +++ b/setuptools/_distutils/tests/test_install_scripts.py @@ -76,7 +76,7 @@ class InstallScriptsTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(InstallScriptsTestCase) + return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git 
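The patched _expanduser stub above only substitutes the home prefix instead of redirecting every path to the temporary directory, which keeps absolute paths intact during the user-site tests. The behaviour in isolation; the fake home directory is illustrative:

import os

tmpdir = '/tmp/fake-home'

def _expanduser(path):
    # substitute '~' with the fake home; leave other paths untouched
    if path.startswith('~'):
        return os.path.normpath(tmpdir + path[1:])
    return path

assert _expanduser('~/.local') == os.path.normpath('/tmp/fake-home/.local')
assert _expanduser('/usr/lib') == '/usr/lib'
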
a/setuptools/_distutils/tests/test_log.py b/setuptools/_distutils/tests/test_log.py index 75cf9006..ec2ae028 100644 --- a/setuptools/_distutils/tests/test_log.py +++ b/setuptools/_distutils/tests/test_log.py @@ -40,7 +40,7 @@ class TestLog(unittest.TestCase): 'Fαtal\t\\xc8rr\\u014dr') def test_suite(): - return unittest.makeSuite(TestLog) + return unittest.TestLoader().loadTestsFromTestCase(TestLog) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_msvc9compiler.py b/setuptools/_distutils/tests/test_msvc9compiler.py index 77a07ef3..6235405e 100644 --- a/setuptools/_distutils/tests/test_msvc9compiler.py +++ b/setuptools/_distutils/tests/test_msvc9compiler.py @@ -178,7 +178,7 @@ class msvc9compilerTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(msvc9compilerTestCase) + return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_msvccompiler.py b/setuptools/_distutils/tests/test_msvccompiler.py index 46a51cd0..846e5bb8 100644 --- a/setuptools/_distutils/tests/test_msvccompiler.py +++ b/setuptools/_distutils/tests/test_msvccompiler.py @@ -98,7 +98,7 @@ class TestSpawn(unittest.TestCase): compiler = _msvccompiler.MSVCCompiler() compiler._paths = "expected" inner_cmd = 'import os; assert os.environ["PATH"] == "expected"' - command = ['python', '-c', inner_cmd] + command = [sys.executable, '-c', inner_cmd] threads = [ CheckThread(target=compiler.spawn, args=[command]) @@ -132,7 +132,7 @@ class TestSpawn(unittest.TestCase): def test_suite(): - return unittest.makeSuite(msvccompilerTestCase) + return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_register.py b/setuptools/_distutils/tests/test_register.py index 84607f99..5770ed58 100644 --- a/setuptools/_distutils/tests/test_register.py +++ b/setuptools/_distutils/tests/test_register.py @@ -319,7 +319,7 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): def test_suite(): - return unittest.makeSuite(RegisterTestCase) + return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_sdist.py b/setuptools/_distutils/tests/test_sdist.py index b087a817..4c51717c 100644 --- a/setuptools/_distutils/tests/test_sdist.py +++ b/setuptools/_distutils/tests/test_sdist.py @@ -7,6 +7,7 @@ import zipfile from os.path import join from textwrap import dedent from test.support import captured_stdout, run_unittest +from .unix_compat import require_unix_id, require_uid_0, pwd, grp from .py38compat import check_warnings @@ -16,13 +17,6 @@ try: except ImportError: ZLIB_SUPPORT = False -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - from distutils.command.sdist import sdist, show_formats from distutils.core import Distribution from distutils.tests.test_config import BasePyPIRCCommandTestCase @@ -440,7 +434,8 @@ class SDistTestCase(BasePyPIRCCommandTestCase): 'fake-1.0/README.manual']) @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") - @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") + @require_unix_id + @require_uid_0 @unittest.skipIf(find_executable('tar') is None, "The tar command is not found") @unittest.skipIf(find_executable('gzip') is None, @@ -488,7 +483,7 @@ class 
SDistTestCase(BasePyPIRCCommandTestCase): archive.close() def test_suite(): - return unittest.makeSuite(SDistTestCase) + return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_spawn.py b/setuptools/_distutils/tests/test_spawn.py index f620da78..c5ed8e2b 100644 --- a/setuptools/_distutils/tests/test_spawn.py +++ b/setuptools/_distutils/tests/test_spawn.py @@ -133,7 +133,7 @@ class SpawnTestCase(support.TempdirManager, def test_suite(): - return unittest.makeSuite(SpawnTestCase) + return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_sysconfig.py b/setuptools/_distutils/tests/test_sysconfig.py index 80cd1599..e671f9e0 100644 --- a/setuptools/_distutils/tests/test_sysconfig.py +++ b/setuptools/_distutils/tests/test_sysconfig.py @@ -38,6 +38,14 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): config_h = sysconfig.get_config_h_filename() self.assertTrue(os.path.isfile(config_h), config_h) + @unittest.skipIf(sys.platform == 'win32', + 'Makefile only exists on Unix like systems') + @unittest.skipIf(sys.implementation.name != 'cpython', + 'Makefile only exists in CPython') + def test_get_makefile_filename(self): + makefile = sysconfig.get_makefile_filename() + self.assertTrue(os.path.isfile(makefile), makefile) + def test_get_python_lib(self): # XXX doesn't work on Linux when Python was never installed before #self.assertTrue(os.path.isdir(lib_dir), lib_dir) @@ -283,10 +291,27 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): outs, errs = p.communicate() self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) + def test_parse_config_h(self): + config_h = sysconfig.get_config_h_filename() + input = {} + with open(config_h, encoding="utf-8") as f: + result = sysconfig.parse_config_h(f, g=input) + self.assertTrue(input is result) + with open(config_h, encoding="utf-8") as f: + result = sysconfig.parse_config_h(f) + self.assertTrue(isinstance(result, dict)) + + @unittest.skipUnless(sys.platform == 'win32', + 'Testing windows pyd suffix') + @unittest.skipUnless(sys.implementation.name == 'cpython', + 'Need cpython for this test') + def test_win_ext_suffix(self): + self.assertTrue(sysconfig.get_config_var("EXT_SUFFIX").endswith(".pyd")) + self.assertNotEqual(sysconfig.get_config_var("EXT_SUFFIX"), ".pyd") def test_suite(): suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(SysconfigTestCase)) + suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase)) return suite diff --git a/setuptools/_distutils/tests/test_text_file.py b/setuptools/_distutils/tests/test_text_file.py index 7e76240a..ebac3d52 100644 --- a/setuptools/_distutils/tests/test_text_file.py +++ b/setuptools/_distutils/tests/test_text_file.py @@ -101,7 +101,7 @@ class TextFileTestCase(support.TempdirManager, unittest.TestCase): in_file.close() def test_suite(): - return unittest.makeSuite(TextFileTestCase) + return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py index 63c7dd37..c8b4c149 100644 --- a/setuptools/_distutils/tests/test_unixccompiler.py +++ b/setuptools/_distutils/tests/test_unixccompiler.py @@ -3,6 +3,7 @@ import os import sys import unittest from 
test.support import run_unittest +from unittest.mock import patch from .py38compat import EnvironmentVarGuard @@ -11,9 +12,12 @@ from distutils.errors import DistutilsPlatformError from distutils.unixccompiler import UnixCCompiler from distutils.util import _clear_cached_macosx_ver -class UnixCCompilerTestCase(unittest.TestCase): +from . import support + +class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): def setUp(self): + super().setUp() self._backup_platform = sys.platform self._backup_get_config_var = sysconfig.get_config_var self._backup_get_config_vars = sysconfig.get_config_vars @@ -23,6 +27,7 @@ class UnixCCompilerTestCase(unittest.TestCase): self.cc = CompilerWrapper() def tearDown(self): + super().tearDown() sys.platform = self._backup_platform sysconfig.get_config_var = self._backup_get_config_var sysconfig.get_config_vars = self._backup_get_config_vars @@ -211,6 +216,42 @@ class UnixCCompilerTestCase(unittest.TestCase): self.assertEqual(self.cc.linker_so[0], 'my_cc') @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + def test_cc_overrides_ldshared_for_cxx_correctly(self): + """ + Ensure that setting CC env variable also changes default linker + correctly when building C++ extensions. + + pypa/distutils#126 + """ + def gcv(v): + if v == 'LDSHARED': + return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + elif v == 'CXX': + return 'g++-4.2' + return 'gcc-4.2' + + def gcvs(*args, _orig=sysconfig.get_config_vars): + if args: + return list(map(sysconfig.get_config_var, args)) + return _orig() + + sysconfig.get_config_var = gcv + sysconfig.get_config_vars = gcvs + with patch.object(self.cc, 'spawn', return_value=None) as mock_spawn, \ + patch.object(self.cc, '_need_link', return_value=True), \ + patch.object(self.cc, 'mkpath', return_value=None), \ + EnvironmentVarGuard() as env: + env['CC'] = 'ccache my_cc' + env['CXX'] = 'my_cxx' + del env['LDSHARED'] + sysconfig.customize_compiler(self.cc) + self.assertEqual(self.cc.linker_so[0:2], ['ccache', 'my_cc']) + self.cc.link(None, [], 'a.out', target_lang='c++') + call_args = mock_spawn.call_args[0][0] + expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup'] + assert call_args[:4] == expected + + @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") def test_explicit_ldshared(self): # Issue #18080: # ensure that setting CC env variable does not change @@ -237,11 +278,12 @@ class UnixCCompilerTestCase(unittest.TestCase): # ensure that setting output_dir does not raise # FileNotFoundError: [Errno 2] No such file or directory: 'a.out' self.cc.output_dir = 'scratch' + os.chdir(self.mkdtemp()) self.cc.has_function('abort', includes=['stdlib.h']) def test_suite(): - return unittest.makeSuite(UnixCCompilerTestCase) + return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_upload.py b/setuptools/_distutils/tests/test_upload.py index bca5516d..ce3e84a2 100644 --- a/setuptools/_distutils/tests/test_upload.py +++ b/setuptools/_distutils/tests/test_upload.py @@ -217,7 +217,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): def test_suite(): - return unittest.makeSuite(uploadTestCase) + return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_util.py b/setuptools/_distutils/tests/test_util.py index bf0d4333..2738388e 100644 --- 
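The new test_cc_overrides_ldshared_for_cxx_correctly test above asserts that an overridden CC also drives the shared-library linker, and that C++ links then swap in CXX while keeping the shared flags. A rough, Unix-only sketch of that situation; it assumes LDSHARED is unset, the command names are illustrative, and the printed result relies on the distutils shipped with this setuptools:

import os
from distutils import sysconfig
from distutils.unixccompiler import UnixCCompiler

os.environ['CC'] = 'ccache my_cc'
os.environ['CXX'] = 'my_cxx'
os.environ.pop('LDSHARED', None)

cc = UnixCCompiler()
sysconfig.customize_compiler(cc)
print(cc.linker_so[:2])   # expected ['ccache', 'my_cc'] with this distutils
# linking with target_lang='c++' should then use 'my_cxx' plus the original
# shared flags (e.g. '-bundle', '-undefined', 'dynamic_lookup' on macOS)
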
a/setuptools/_distutils/tests/test_util.py +++ b/setuptools/_distutils/tests/test_util.py @@ -2,6 +2,7 @@ import os import sys import unittest +import sysconfig as stdlib_sysconfig from copy import copy from test.support import run_unittest from unittest import mock @@ -10,12 +11,10 @@ from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError from distutils.util import (get_platform, convert_path, change_root, check_environ, split_quoted, strtobool, rfc822_escape, byte_compile, - grok_environment_error) + grok_environment_error, get_host_platform) from distutils import util # used to patch _environ_checked -from distutils.sysconfig import get_config_vars from distutils import sysconfig from distutils.tests import support -import _osx_support class UtilTestCase(support.EnvironGuard, unittest.TestCase): @@ -63,110 +62,26 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def _get_uname(self): return self._uname - def test_get_platform(self): - - # windows XP, 32bits - os.name = 'nt' - sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Intel)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win32') - - # windows XP, amd64 - os.name = 'nt' - sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' - '[MSC v.1310 32 bit (Amd64)]') - sys.platform = 'win32' - self.assertEqual(get_platform(), 'win-amd64') - - # macbook - os.name = 'posix' - sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]') - sys.platform = 'darwin' - self._set_uname(('Darwin', 'macziade', '8.11.1', - ('Darwin Kernel Version 8.11.1: ' - 'Wed Oct 10 18:23:28 PDT 2007; ' - 'root:xnu-792.25.20~1/RELEASE_I386'), 'i386')) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' - - get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' - '-fwrapv -O3 -Wall -Wstrict-prototypes') - - cursize = sys.maxsize - sys.maxsize = (2 ** 31)-1 - try: - self.assertEqual(get_platform(), 'macosx-10.3-i386') - finally: - sys.maxsize = cursize - - # macbook with fat binaries (fat, universal or fat64) - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4' - get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat') - - _osx_support._remove_original_values(get_config_vars()) - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1' - self.assertEqual(get_platform(), 'macosx-10.4-fat') - + def test_get_host_platform(self): + with unittest.mock.patch('os.name', 'nt'): + with unittest.mock.patch('sys.version', '... [... (ARM64)]'): + self.assertEqual(get_host_platform(), 'win-arm64') + with unittest.mock.patch('sys.version', '... [... 
(ARM)]'): + self.assertEqual(get_host_platform(), 'win-arm32') - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') + with unittest.mock.patch('sys.version_info', (3, 9, 0, 'final', 0)): + self.assertEqual(get_host_platform(), stdlib_sysconfig.get_platform()) - self.assertEqual(get_platform(), 'macosx-10.4-intel') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-fat3') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - self.assertEqual(get_platform(), 'macosx-10.4-universal') - - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3') - - self.assertEqual(get_platform(), 'macosx-10.4-fat64') - - for arch in ('ppc', 'i386', 'x86_64', 'ppc64'): - _osx_support._remove_original_values(get_config_vars()) - get_config_vars()['CFLAGS'] = ('-arch %s -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk ' - '-fno-strict-aliasing -fno-common ' - '-dynamic -DNDEBUG -g -O3'%(arch,)) - - self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,)) - - - # linux debian sarge - os.name = 'posix' - sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) ' - '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]') - sys.platform = 'linux2' - self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7', - '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686')) - - self.assertEqual(get_platform(), 'linux-i686') - - # XXX more platforms to tests here + def test_get_platform(self): + with unittest.mock.patch('os.name', 'nt'): + with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}): + self.assertEqual(get_platform(), 'win32') + with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}): + self.assertEqual(get_platform(), 'win-amd64') + with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}): + self.assertEqual(get_platform(), 'win-arm32') + with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}): + self.assertEqual(get_platform(), 'win-arm64') def test_convert_path(self): # linux/mac @@ -303,7 +218,7 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def test_suite(): - return unittest.makeSuite(UtilTestCase) + return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_version.py b/setuptools/_distutils/tests/test_version.py index 8671cd2f..8405aa3a 100644 --- a/setuptools/_distutils/tests/test_version.py +++ b/setuptools/_distutils/tests/test_version.py @@ -1,11 +1,19 @@ """Tests for distutils.version.""" import unittest +import distutils from distutils.version import LooseVersion from distutils.version import StrictVersion from test.support import run_unittest class VersionTestCase(unittest.TestCase): + def setUp(self): + self.ctx = 
distutils.version.suppress_known_deprecation() + self.ctx.__enter__() + + def tearDown(self): + self.ctx.__exit__(None, None, None) + def test_prerelease(self): version = StrictVersion('1.2.3a1') self.assertEqual(version.version, (1, 2, 3)) @@ -81,7 +89,7 @@ class VersionTestCase(unittest.TestCase): (v1, v2, res)) def test_suite(): - return unittest.makeSuite(VersionTestCase) + return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/unix_compat.py b/setuptools/_distutils/tests/unix_compat.py new file mode 100644 index 00000000..b7718c26 --- /dev/null +++ b/setuptools/_distutils/tests/unix_compat.py @@ -0,0 +1,16 @@ +import sys +import unittest + +try: + import grp + import pwd +except ImportError: + grp = pwd = None + + +UNIX_ID_SUPPORT = grp and pwd +UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin" + +require_unix_id = unittest.skipUnless( + UNIX_ID_SUPPORT, "Requires grp and pwd support") +require_uid_0 = unittest.skipUnless(UID_0_SUPPORT, "Requires UID 0 support") diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py index a07e5988..715408f5 100644 --- a/setuptools/_distutils/unixccompiler.py +++ b/setuptools/_distutils/unixccompiler.py @@ -22,9 +22,7 @@ from distutils.ccompiler import \ from distutils.errors import \ DistutilsExecError, CompileError, LibError, LinkError from distutils import log - -if sys.platform == 'darwin': - import _osx_support +from ._macos_compat import compiler_fixup # XXX Things not currently handled: # * optimization/debug/warning flags; we just use whatever's in Python's @@ -42,6 +40,66 @@ if sys.platform == 'darwin': # options and carry on. +def _split_env(cmd): + """ + For macOS, split command into 'env' portion (if any) + and the rest of the linker command. + + >>> _split_env(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_env(['/usr/bin/env', 'A=3', 'gcc']) + (['/usr/bin/env', 'A=3'], ['gcc']) + """ + pivot = 0 + if os.path.basename(cmd[0]) == "env": + pivot = 1 + while '=' in cmd[pivot]: + pivot += 1 + return cmd[:pivot], cmd[pivot:] + + +def _split_aix(cmd): + """ + AIX platforms prefix the compiler with the ld_so_aix + script, so split that from the linker command. + + >>> _split_aix(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc']) + (['/bin/foo/ld_so_aix'], ['gcc']) + """ + pivot = os.path.basename(cmd[0]) == 'ld_so_aix' + return cmd[:pivot], cmd[pivot:] + + +def _linker_params(linker_cmd, compiler_cmd): + """ + The linker command usually begins with the compiler + command (possibly multiple elements), followed by zero or more + params for shared library building. + + If the LDSHARED env variable overrides the linker command, + however, the commands may not match. + + Return the best guess of the linker parameters by stripping + the linker command. If the compiler command does not + match the linker command, assume the linker command is + just the first element. 
+ + >>> _linker_params('gcc foo bar'.split(), ['gcc']) + ['foo', 'bar'] + >>> _linker_params('gcc foo bar'.split(), ['other']) + ['foo', 'bar'] + >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split()) + ['foo', 'bar'] + >>> _linker_params(['gcc'], ['gcc']) + [] + """ + c_len = len(compiler_cmd) + pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1 + return linker_cmd[pivot:] + + class UnixCCompiler(CCompiler): compiler_type = 'unix' @@ -109,10 +167,8 @@ class UnixCCompiler(CCompiler): raise CompileError(msg) def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) + compiler_so = compiler_fixup( + self.compiler_so, cc_args + extra_postargs) try: self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) @@ -173,33 +229,22 @@ class UnixCCompiler(CCompiler): ld_args.extend(extra_postargs) self.mkpath(os.path.dirname(output_filename)) try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] + # Select a linker based on context: linker_exe when + # building an executable or linker_so (with shared options) + # when building a shared library. + building_exe = target_desc == CCompiler.EXECUTABLE + linker = (self.linker_exe if building_exe else self.linker_so)[:] + if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) + env, linker_ne = _split_env(linker) + aix, linker_na = _split_aix(linker_ne) + _, compiler_cxx_ne = _split_env(self.compiler_cxx) + _, linker_exe_ne = _split_env(self.linker_exe) + + params = _linker_params(linker_na, linker_exe_ne) + linker = env + aix + compiler_cxx_ne + params + + linker = compiler_fixup(linker, ld_args) self.spawn(linker + ld_args) except DistutilsExecError as msg: diff --git a/setuptools/_distutils/util.py b/setuptools/_distutils/util.py index ac6d446d..6d506d7e 100644 --- a/setuptools/_distutils/util.py +++ b/setuptools/_distutils/util.py @@ -9,6 +9,7 @@ import re import importlib.util import string import sys +import sysconfig from distutils.errors import DistutilsPlatformError from distutils.dep_util import newer from distutils.spawn import spawn @@ -20,82 +21,29 @@ from .py35compat import _optim_args_from_interpreter_flags def get_host_platform(): """Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) 
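Taken together, the helpers above let link() rebuild the C++ link command from its parts: the env prefix, an optional ld_so_aix prefix, the C++ compiler, and the shared-library parameters recovered from linker_so. A hedged walk-through using the private helpers introduced here; the command values are illustrative:

from distutils.unixccompiler import _split_env, _split_aix, _linker_params

linker_so    = ['env', 'A=1', 'ccache', 'gcc', '-bundle', '-undefined', 'dynamic_lookup']
compiler_cxx = ['g++']
linker_exe   = ['ccache', 'gcc']

env, linker_ne = _split_env(linker_so)      # (['env', 'A=1'], ['ccache', 'gcc', ...])
aix, linker_na = _split_aix(linker_ne)      # ([], unchanged) outside AIX
_, cxx_ne      = _split_env(compiler_cxx)
_, exe_ne      = _split_env(linker_exe)
params = _linker_params(linker_na, exe_ne)  # ['-bundle', '-undefined', 'dynamic_lookup']
print(env + aix + cxx_ne + params)
# ['env', 'A=1', 'g++', '-bundle', '-undefined', 'dynamic_lookup']
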
- solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - + distributions. """ - if os.name == 'nt': - if 'amd64' in sys.version.lower(): - return 'win-amd64' - if '(arm)' in sys.version.lower(): - return 'win-arm32' - if '(arm64)' in sys.version.lower(): - return 'win-arm64' - return sys.platform - - # Set for cross builds explicitly - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - - if os.name != "posix" or not hasattr(os, 'uname'): - # XXX what about the architecture? NT is Intel or Alpha, - # Mac OS is M68k or PPC, etc. - return sys.platform - - # Try to distinguish various flavours of Unix - - (osname, host, release, version, machine) = os.uname() - - # Convert the OS name to lowercase, remove '/' characters, and translate - # spaces (for "Power Macintosh") - osname = osname.lower().replace('/', '') - machine = machine.replace(' ', '_') - machine = machine.replace('/', '-') - - if osname[:5] == "linux": - # At least on Linux/Intel, 'machine' is the processor -- - # i386, etc. - # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) - elif osname[:5] == "sunos": - if release[0] >= "5": # SunOS 5 == Solaris 2 - osname = "solaris" - release = "%d.%s" % (int(release[0]) - 3, release[2:]) - # We can't use "platform.architecture()[0]" because a - # bootstrap problem. We use a dict to get an error - # if some suspicious happens. - bitness = {2147483647:"32bit", 9223372036854775807:"64bit"} - machine += ".%s" % bitness[sys.maxsize] - # fall through to standard osname-release-machine representation - elif osname[:3] == "aix": - from .py38compat import aix_platform - return aix_platform(osname, version, release) - elif osname[:6] == "cygwin": - osname = "cygwin" - rel_re = re.compile (r'[\d.]+', re.ASCII) - m = rel_re.match(release) - if m: - release = m.group() - elif osname[:6] == "darwin": - import _osx_support, distutils.sysconfig - osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) - - return "%s-%s-%s" % (osname, release, machine) + + # We initially exposed platforms as defined in Python 3.9 + # even with older Python versions when distutils was split out. + # Now that we delegate to stdlib sysconfig we need to restore this + # in case anyone has started to depend on it. 
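With the hand-rolled platform detection removed above, get_host_platform() defers to the standard library apart from the back-compat shims that follow below, and get_platform() keys Windows cross-builds off the VS developer-prompt target architecture. A small sketch of both behaviours, mirroring the updated tests; results depend on the running interpreter:

import os
import sysconfig
from unittest import mock
from distutils.util import get_platform, get_host_platform

print(get_host_platform() == sysconfig.get_platform())   # True on Python 3.9+
with mock.patch('os.name', 'nt'), \
        mock.patch.dict(os.environ, {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
    print(get_platform())                                 # 'win-arm64'
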
+ + if sys.version_info < (3, 8): + if os.name == 'nt': + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + + if sys.version_info < (3, 9): + if os.name == "posix" and hasattr(os, 'uname'): + osname, host, release, version, machine = os.uname() + if osname[:3] == "aix": + from .py38compat import aix_platform + return aix_platform(osname, version, release) + + return sysconfig.get_platform() def get_platform(): if os.name == 'nt': diff --git a/setuptools/_distutils/version.py b/setuptools/_distutils/version.py index c33bebae..31f504e4 100644 --- a/setuptools/_distutils/version.py +++ b/setuptools/_distutils/version.py @@ -27,6 +27,20 @@ Every version number class implements the following interface: """ import re +import warnings +import contextlib + + +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.filterwarnings( + action='default', + category=DeprecationWarning, + message="distutils Version classes are deprecated.", + ) + yield ctx + class Version: """Abstract base class for version numbering classes. Just provides @@ -38,6 +52,12 @@ class Version: def __init__ (self, vstring=None): if vstring: self.parse(vstring) + warnings.warn( + "distutils Version classes are deprecated. " + "Use packaging.version instead.", + DeprecationWarning, + stacklevel=2, + ) def __repr__ (self): return "%s ('%s')" % (self.__class__.__name__, str(self)) @@ -165,7 +185,8 @@ class StrictVersion (Version): def _cmp (self, other): if isinstance(other, str): - other = StrictVersion(other) + with suppress_known_deprecation(): + other = StrictVersion(other) elif not isinstance(other, StrictVersion): return NotImplemented @@ -301,11 +322,6 @@ class LooseVersion (Version): component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - def __init__ (self, vstring=None): - if vstring: - self.parse(vstring) - - def parse (self, vstring): # I've given up on thinking I can reconstruct the version string # from the parsed tuple -- so I just store the string here for diff --git a/setuptools/_distutils/versionpredicate.py b/setuptools/_distutils/versionpredicate.py index 062c98f2..55f25d91 100644 --- a/setuptools/_distutils/versionpredicate.py +++ b/setuptools/_distutils/versionpredicate.py @@ -23,7 +23,9 @@ def splitUp(pred): if not res: raise ValueError("bad package restriction syntax: %r" % pred) comp, verStr = res.groups() - return (comp, distutils.version.StrictVersion(verStr)) + with distutils.version.suppress_known_deprecation(): + other = distutils.version.StrictVersion(verStr) + return (comp, other) compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, ">": operator.gt, ">=": operator.ge, "!=": operator.ne} @@ -162,5 +164,6 @@ def split_provision(value): raise ValueError("illegal provides specification: %r" % value) ver = m.group(2) or None if ver: - ver = distutils.version.StrictVersion(ver) + with distutils.version.suppress_known_deprecation(): + ver = distutils.version.StrictVersion(ver) return m.group(1), ver diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py new file mode 100644 index 00000000..f087681b --- /dev/null +++ b/setuptools/_entry_points.py @@ -0,0 +1,86 @@ +import functools +import operator +import itertools + +from .extern.jaraco.text import yield_lines +from .extern.jaraco.functools import pass_none +from ._importlib import metadata +from ._itertools import ensure_unique +from .extern.more_itertools import consume + + +def 
ensure_valid(ep): + """ + Exercise one of the dynamic properties to trigger + the pattern match. + """ + ep.extras + + +def load_group(value, group): + """ + Given a value of an entry point or series of entry points, + return each as an EntryPoint. + """ + # normalize to a single sequence of lines + lines = yield_lines(value) + text = f'[{group}]\n' + '\n'.join(lines) + return metadata.EntryPoints._from_text(text) + + +def by_group_and_name(ep): + return ep.group, ep.name + + +def validate(eps: metadata.EntryPoints): + """ + Ensure entry points are unique by group and name and validate each. + """ + consume(map(ensure_valid, ensure_unique(eps, key=by_group_and_name))) + return eps + + +@functools.singledispatch +def load(eps): + """ + Given a Distribution.entry_points, produce EntryPoints. + """ + groups = itertools.chain.from_iterable( + load_group(value, group) + for group, value in eps.items()) + return validate(metadata.EntryPoints(groups)) + + +@load.register(str) +def _(eps): + r""" + >>> ep, = load('[console_scripts]\nfoo=bar') + >>> ep.group + 'console_scripts' + >>> ep.name + 'foo' + >>> ep.value + 'bar' + """ + return validate(metadata.EntryPoints(metadata.EntryPoints._from_text(eps))) + + +load.register(type(None), lambda x: x) + + +@pass_none +def render(eps: metadata.EntryPoints): + by_group = operator.attrgetter('group') + groups = itertools.groupby(sorted(eps, key=by_group), by_group) + + return '\n'.join( + f'[{group}]\n{render_items(items)}\n' + for group, items in groups + ) + + +def render_items(eps): + return '\n'.join( + f'{ep.name} = {ep.value}' + for ep in sorted(eps) + ) diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py new file mode 100644 index 00000000..c1ac137e --- /dev/null +++ b/setuptools/_importlib.py @@ -0,0 +1,36 @@ +import sys + + +def disable_importlib_metadata_finder(metadata): + """ + Ensure importlib_metadata doesn't provide older, incompatible + Distributions. + + Workaround for #3102. + """ + try: + import importlib_metadata + except ImportError: + return + if importlib_metadata is metadata: + return + to_remove = [ + ob + for ob in sys.meta_path + if isinstance(ob, importlib_metadata.MetadataPathFinder) + ] + for item in to_remove: + sys.meta_path.remove(item) + + +if sys.version_info < (3, 10): + from setuptools.extern import importlib_metadata as metadata + disable_importlib_metadata_finder(metadata) +else: + import importlib.metadata as metadata # noqa: F401 + + +if sys.version_info < (3, 9): + from setuptools.extern import importlib_resources as resources +else: + import importlib.resources as resources # noqa: F401 diff --git a/setuptools/_itertools.py b/setuptools/_itertools.py new file mode 100644 index 00000000..b8bf6d21 --- /dev/null +++ b/setuptools/_itertools.py @@ -0,0 +1,23 @@ +from setuptools.extern.more_itertools import consume # noqa: F401 + + +# copied from jaraco.itertools 6.1 +def ensure_unique(iterable, key=lambda x: x): + """ + Wrap an iterable to raise a ValueError if non-unique values are encountered. + + >>> list(ensure_unique('abc')) + ['a', 'b', 'c'] + >>> consume(ensure_unique('abca')) + Traceback (most recent call last): + ... + ValueError: Duplicate element 'a' encountered. 
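The new _entry_points module above normalizes the entry point inputs accepted by setup() into importlib.metadata EntryPoints, validates that group/name pairs are unique, and can render them back to entry_points.txt-style text. A hedged sketch of these private helpers (internal API of this setuptools version; the spec below is illustrative):

from setuptools._entry_points import load, render

eps = load('[console_scripts]\nfoo = pkg.mod:main')
print(eps['foo'].value)   # 'pkg.mod:main'
print(render(eps))
# [console_scripts]
# foo = pkg.mod:main
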
+ """ + seen = set() + seen_add = seen.add + for element in iterable: + k = key(element) + if k in seen: + raise ValueError(f"Duplicate element {element!r} encountered.") + seen_add(k) + yield element diff --git a/setuptools/_path.py b/setuptools/_path.py new file mode 100644 index 00000000..ede9cb00 --- /dev/null +++ b/setuptools/_path.py @@ -0,0 +1,7 @@ +import os + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + os.makedirs(dirname, exist_ok=True) diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py new file mode 100644 index 00000000..ca724174 --- /dev/null +++ b/setuptools/_reqs.py @@ -0,0 +1,19 @@ +import setuptools.extern.jaraco.text as text + +from pkg_resources import Requirement + + +def parse_strings(strs): + """ + Yield requirement strings for each specification in `strs`. + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + return text.join_continuation(map(text.drop_comment, text.yield_lines(strs))) + + +def parse(strs): + """ + Deprecated drop-in replacement for pkg_resources.parse_requirements. + """ + return map(Requirement, parse_strings(strs)) diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/INSTALLER b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE new file mode 100644 index 00000000..be7e092b --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Jason R. Coombs, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA new file mode 100644 index 00000000..fda4bc75 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 4.11.1 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/importlib_metadata + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 4.8 + - 3.11 + * - 4.4 + - 3.10 + * - 1.4 + - 3.8 + + +Usage +===== + +See the `online documentation <https://importlib_metadata.readthedocs.io/>`_ +for usage details. + +`Finder authors +<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. 
+- Packages installed without metadata. + +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib_metadata.readthedocs.io/ + + diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD new file mode 100644 index 00000000..d8c2dff6 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/RECORD @@ -0,0 +1,24 @@ +importlib_metadata-4.11.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_metadata-4.11.1.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571
+importlib_metadata-4.11.1.dist-info/METADATA,sha256=XNgM09x6V8tbt6ugvKjiUxH9yB7pBdILWuWE5YNWHRw,3999
+importlib_metadata-4.11.1.dist-info/RECORD,,
+importlib_metadata-4.11.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata-4.11.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+importlib_metadata-4.11.1.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata/__init__.py,sha256=Wkh_tb0u0Ds_615ByV9VLLjqgoOWirwMY8EW40oO3nM,30122
+importlib_metadata/__pycache__/__init__.cpython-310.pyc,,
+importlib_metadata/__pycache__/_adapters.cpython-310.pyc,,
+importlib_metadata/__pycache__/_collections.cpython-310.pyc,,
+importlib_metadata/__pycache__/_compat.cpython-310.pyc,,
+importlib_metadata/__pycache__/_functools.cpython-310.pyc,,
+importlib_metadata/__pycache__/_itertools.cpython-310.pyc,,
+importlib_metadata/__pycache__/_meta.cpython-310.pyc,,
+importlib_metadata/__pycache__/_text.cpython-310.pyc,,
+importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862
+importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
+importlib_metadata/_compat.py,sha256=EU2XCFBPFByuI0Of6XkAuBYbzqSyjwwwwqmsK4ccna0,1826
+importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
+importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
+importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154
+importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
+importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
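The vendored backport's top-level helpers (version(), metadata(), requires(), files(), entry_points()) are defined in the importlib_metadata/__init__.py hunk below; the README text earlier in this patch only points at the online documentation. A minimal usage sketch, illustrative only and not part of this patch; "wheel" is merely a stand-in for any installed distribution:

import importlib_metadata as md  # assumes the backport is importable under this name

print(md.version("wheel"))            # version string from the "Version" metadata key
meta = md.metadata("wheel")           # PackageMetadata (email.Message-like mapping)
print(meta["Summary"])
print(md.requires("wheel"))           # list of PEP 508 requirement strings, or None

# Future-proof entry-point access: passing selection parameters (or calling
# .select() on the result) yields an EntryPoints collection instead of the
# deprecated SelectableGroups dict interface.
for ep in md.entry_points(group="console_scripts"):
    print(ep.name, "->", ep.value)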
diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/REQUESTED b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/WHEEL b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt new file mode 100644 index 00000000..bbb07547 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/setuptools/_vendor/importlib_metadata/__init__.py b/setuptools/_vendor/importlib_metadata/__init__.py new file mode 100644 index 00000000..292e0c6d --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/__init__.py @@ -0,0 +1,1047 @@ +import os +import re +import abc +import csv +import sys +from .. import zipp +import email +import pathlib +import operator +import textwrap +import warnings +import functools +import itertools +import posixpath +import collections + +from . import _adapters, _meta +from ._collections import FreezableDefaultDict, Pair +from ._compat import ( + NullFinder, + install, + pypy_partial, +) +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen +from ._meta import PackageMetadata, SimplePath + +from contextlib import suppress +from importlib import import_module +from importlib.abc import MetaPathFinder +from itertools import starmap +from typing import List, Mapping, Optional, Union + + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageMetadata', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'packages_distributions', + 'requires', + 'version', +] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + def __str__(self): + return f"No package metadata was found for {self.name}" + + @property + def name(self): + (name,) = self.args + return name + + +class Sectioned: + """ + A simple entry point config parser for performance + + >>> for item in Sectioned.read(Sectioned._sample): + ... 
print(item) + Pair(name='sec1', value='# comments ignored') + Pair(name='sec1', value='a = 1') + Pair(name='sec1', value='b = 2') + Pair(name='sec2', value='a = 2') + + >>> res = Sectioned.section_pairs(Sectioned._sample) + >>> item = next(res) + >>> item.name + 'sec1' + >>> item.value + Pair(name='a', value='1') + >>> item = next(res) + >>> item.value + Pair(name='b', value='2') + >>> item = next(res) + >>> item.name + 'sec2' + >>> item.value + Pair(name='a', value='2') + >>> list(res) + [] + """ + + _sample = textwrap.dedent( + """ + [sec1] + # comments ignored + a = 1 + b = 2 + + [sec2] + a = 2 + """ + ).lstrip() + + @classmethod + def section_pairs(cls, text): + return ( + section._replace(value=Pair.parse(section.value)) + for section in cls.read(text, filter_=cls.valid) + if section.name is not None + ) + + @staticmethod + def read(text, filter_=None): + lines = filter(filter_, map(str.strip, text.splitlines())) + name = None + for value in lines: + section_match = value.startswith('[') and value.endswith(']') + if section_match: + name = value.strip('[]') + continue + yield Pair(name, value) + + @staticmethod + def valid(line): + return line and not line.startswith('#') + + +class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "EntryPoint tuple interface is deprecated. Access members by name.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, item): + self._warn() + return self._key()[item] + + +class EntryPoint(DeprecatedTuple): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + <https://packaging.python.org/specifications/entry-points/>`_ + for more information. + """ + + pattern = re.compile( + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+)\s*)?' + r'((?P<extras>\[.*\])\s*)?$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + dist: Optional['Distribution'] = None + + def __init__(self, name, value, group): + vars(self).update(name=name, value=value, group=group) + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. + """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def module(self): + match = self.pattern.match(self.value) + return match.group('module') + + @property + def attr(self): + match = self.pattern.match(self.value) + return match.group('attr') + + @property + def extras(self): + match = self.pattern.match(self.value) + return list(re.finditer(r'\w+', match.group('extras') or '')) + + def _for(self, dist): + vars(self).update(dist=dist) + return self + + def __iter__(self): + """ + Supply iter so one may construct dicts of EntryPoints by name. 
+ """ + msg = ( + "Construction of dict of EntryPoints is deprecated in " + "favor of EntryPoints." + ) + warnings.warn(msg, DeprecationWarning) + return iter((self.name, self)) + + def matches(self, **params): + attrs = (getattr(self, param) for param in params) + return all(map(operator.eq, params.values(), attrs)) + + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): + return ( + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self): + return hash(self._key()) + + +class DeprecatedList(list): + """ + Allow an otherwise immutable object to implement mutability + for compatibility. + + >>> recwarn = getfixture('recwarn') + >>> dl = DeprecatedList(range(3)) + >>> dl[0] = 1 + >>> dl.append(3) + >>> del dl[3] + >>> dl.reverse() + >>> dl.sort() + >>> dl.extend([4]) + >>> dl.pop(-1) + 4 + >>> dl.remove(1) + >>> dl += [5] + >>> dl + [6] + [1, 2, 5, 6] + >>> dl + (6,) + [1, 2, 5, 6] + >>> dl.insert(0, 0) + >>> dl + [0, 1, 2, 5] + >>> dl == [0, 1, 2, 5] + True + >>> dl == (0, 1, 2, 5) + True + >>> len(recwarn) + 1 + """ + + __slots__ = () + + _warn = functools.partial( + warnings.warn, + "EntryPoints list interface is deprecated. Cast to list if needed.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def _wrap_deprecated_method(method_name: str): # type: ignore + def wrapped(self, *args, **kwargs): + self._warn() + return getattr(super(), method_name)(*args, **kwargs) + + return method_name, wrapped + + locals().update( + map( + _wrap_deprecated_method, + '__setitem__ __delitem__ append reverse extend pop remove ' + '__iadd__ insert sort'.split(), + ) + ) + + def __add__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + return self.__class__(tuple(self) + other) + + def __eq__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + + return tuple(self).__eq__(other) + + +class EntryPoints(DeprecatedList): + """ + An immutable collection of selectable EntryPoint objects. + """ + + __slots__ = () + + def __getitem__(self, name): # -> EntryPoint: + """ + Get the EntryPoint in self matching name. + """ + if isinstance(name, int): + warnings.warn( + "Accessing entry points by index is deprecated. " + "Cast to tuple if needed.", + DeprecationWarning, + stacklevel=2, + ) + return super().__getitem__(name) + try: + return next(iter(self.select(name=name))) + except StopIteration: + raise KeyError(name) + + def select(self, **params): + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + return EntryPoints(ep for ep in self if ep.matches(**params)) + + @property + def names(self): + """ + Return the set of all names of all entry points. + """ + return {ep.name for ep in self} + + @property + def groups(self): + """ + Return the set of all groups of all entry points. + + For coverage while SelectableGroups is present. 
+ >>> EntryPoints().groups + set() + """ + return {ep.group for ep in self} + + @classmethod + def _from_text_for(cls, text, dist): + return cls(ep._for(dist) for ep in cls._from_text(text)) + + @staticmethod + def _from_text(text): + return ( + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') + ) + + +class Deprecated: + """ + Compatibility add-in for mapping to indicate that + mapping behavior is deprecated. + + >>> recwarn = getfixture('recwarn') + >>> class DeprecatedDict(Deprecated, dict): pass + >>> dd = DeprecatedDict(foo='bar') + >>> dd.get('baz', None) + >>> dd['foo'] + 'bar' + >>> list(dd) + ['foo'] + >>> list(dd.keys()) + ['foo'] + >>> 'foo' in dd + True + >>> list(dd.values()) + ['bar'] + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "SelectableGroups dict interface is deprecated. Use select.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, name): + self._warn() + return super().__getitem__(name) + + def get(self, name, default=None): + self._warn() + return super().get(name, default) + + def __iter__(self): + self._warn() + return super().__iter__() + + def __contains__(self, *args): + self._warn() + return super().__contains__(*args) + + def keys(self): + self._warn() + return super().keys() + + def values(self): + self._warn() + return super().values() + + +class SelectableGroups(Deprecated, dict): + """ + A backward- and forward-compatible result from + entry_points that fully implements the dict interface. + """ + + @classmethod + def load(cls, eps): + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return cls((group, EntryPoints(eps)) for group, eps in grouped) + + @property + def _all(self): + """ + Reconstruct a list of all entrypoints from the groups. + """ + groups = super(Deprecated, self).values() + return EntryPoints(itertools.chain.from_iterable(groups)) + + @property + def groups(self): + return self._all.groups + + @property + def names(self): + """ + for coverage: + >>> SelectableGroups().names + set() + """ + return self._all.names + + def select(self, **params): + if not params: + return self + return self._all.select(**params) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + def read_text(self, encoding='utf-8'): + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self): + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self): + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec): + self.mode, _, self.value = spec.partition('=') + + def __repr__(self): + return f'<FileHash mode: {self.mode} value: {self.value}>' + + +class Distribution: + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename): + """Attempt to load metadata file given by the name. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path): + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name): + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. 
+ :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + """ + for resolver in cls._discover_resolvers(): + dists = resolver(DistributionFinder.Context(name=name)) + dist = next(iter(dists), None) + if dist is not None: + return dist + else: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs): + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. + """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path): + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(pathlib.Path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) for finder in sys.meta_path + ) + return filter(None, declared) + + @property + def metadata(self) -> _meta.PackageMetadata: + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + return _adapters.Message(email.message_from_string(text)) + + @property + def name(self): + """Return the 'Name' metadata for the distribution package.""" + return self.metadata['Name'] + + @property + def _normalized_name(self): + """Return a normalized version of the name.""" + return Prepared.normalize(self.name) + + @property + def version(self): + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self): + return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) + + @property + def files(self): + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is + missing. + Result may be empty if the metadata exists but is empty. 
+ """ + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo(self): + """ + SOURCES.txt might contain literal commas, so wrap each line + in quotes. + """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self): + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return pass_none(self._deps_from_requires_text)(source) + + @classmethod + def _deps_from_requires_text(cls, source): + return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + + def make_condition(name): + return name and f'extra == "{name}"' + + def quoted_marker(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = f'({markers})' + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + + for section in sections: + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + """ + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + """ + + name = None + """ + Specific name for which a distribution finder should match. + A name of ``None`` matches all distributions. + """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self): + """ + The sequence of directory path that a distribution finder + should search. + + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. 
+ """ + return vars(self).get('path', sys.path) + + @abc.abstractmethod + def find_distributions(self, context=Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +class FastPath: + """ + Micro-optimized class for searching a path for + children. + + >>> FastPath('').children() + ['...'] + """ + + @functools.lru_cache() # type: ignore + def __new__(cls, root): + return super().__new__(cls) + + def __init__(self, root): + self.root = str(root) + + def joinpath(self, child): + return pathlib.Path(self.root, child) + + def children(self): + with suppress(Exception): + return os.listdir(self.root or '.') + with suppress(Exception): + return self.zip_children() + return [] + + def zip_children(self): + zip_path = zipp.Path(self.root) + names = zip_path.root.namelist() + self.joinpath = zip_path.joinpath + + return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) + + def search(self, name): + return self.lookup(self.mtime).search(name) + + @property + def mtime(self): + with suppress(OSError): + return os.stat(self.root).st_mtime + self.lookup.cache_clear() + + @method_cache + def lookup(self, mtime): + return Lookup(self) + + +class Lookup: + def __init__(self, path: FastPath): + base = os.path.basename(path.root).lower() + base_is_egg = base.endswith(".egg") + self.infos = FreezableDefaultDict(list) + self.eggs = FreezableDefaultDict(list) + + for child in path.children(): + low = child.lower() + if low.endswith((".dist-info", ".egg-info")): + # rpartition is faster than splitext and suitable for this purpose. + name = low.rpartition(".")[0].partition("-")[0] + normalized = Prepared.normalize(name) + self.infos[normalized].append(path.joinpath(child)) + elif base_is_egg and low == "egg-info": + name = base.rpartition(".")[0].partition("-")[0] + legacy_normalized = Prepared.legacy_normalize(name) + self.eggs[legacy_normalized].append(path.joinpath(child)) + + self.infos.freeze() + self.eggs.freeze() + + def search(self, prepared): + infos = ( + self.infos[prepared.normalized] + if prepared + else itertools.chain.from_iterable(self.infos.values()) + ) + eggs = ( + self.eggs[prepared.legacy_normalized] + if prepared + else itertools.chain.from_iterable(self.eggs.values()) + ) + return itertools.chain(infos, eggs) + + +class Prepared: + """ + A prepared search for metadata on a possibly-named package. + """ + + normalized = None + legacy_normalized = None + + def __init__(self, name): + self.name = name + if name is None: + return + self.normalized = self.normalize(name) + self.legacy_normalized = self.legacy_normalize(name) + + @staticmethod + def normalize(name): + """ + PEP 503 normalization plus dashes as underscores. + """ + return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + + @staticmethod + def legacy_normalize(name): + """ + Normalize the package name as found in the convention in + older packaging tools versions and specs. + """ + return name.lower().replace('-', '_') + + def __bool__(self): + return bool(self.name) + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions(self, context=DistributionFinder.Context()): + """ + Find distributions. 
+ + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ + found = self._search_paths(context.name, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, name, paths): + """Find metadata directories in paths heuristically.""" + prepared = Prepared(name) + return itertools.chain.from_iterable( + path.search(prepared) for path in map(FastPath, paths) + ) + + def invalidate_caches(cls): + FastPath.__new__.cache_clear() + + +class PathDistribution(Distribution): + def __init__(self, path: SimplePath): + """Construct a distribution. + + :param path: SimplePath indicating the metadata directory. + """ + self._path = path + + def read_text(self, filename): + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + PermissionError, + ): + return self._path.joinpath(filename).read_text(encoding='utf-8') + + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path): + return self._path.parent / path + + @property + def _normalized_name(self): + """ + Performance optimization: where possible, resolve the + normalized name from the file system path. + """ + stem = os.path.basename(str(self._path)) + return self._name_from_stem(stem) or super()._normalized_name + + def _name_from_stem(self, stem): + name, ext = os.path.splitext(stem) + if ext not in ('.dist-info', '.egg-info'): + return + name, sep, rest = stem.partition('-') + return name + + +def distribution(distribution_name): + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs): + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. + """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name) -> _meta.PackageMetadata: + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: A PackageMetadata containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name): + """Get the version string for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: + """Return EntryPoint objects for all installed packages. + + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). + + For compatibility, returns ``SelectableGroups`` object unless + selection parameters are supplied. In the future, this function + will return ``EntryPoints`` instead of ``SelectableGroups`` + even when no selection parameters are supplied. + + For maximum future compatibility, pass selection parameters + or invoke ``.select`` with parameters on the result. + + :return: EntryPoints or SelectableGroups for all installed packages. 
+ """ + norm_name = operator.attrgetter('_normalized_name') + unique = functools.partial(unique_everseen, key=norm_name) + eps = itertools.chain.from_iterable( + dist.entry_points for dist in unique(distributions()) + ) + return SelectableGroups.load(eps).select(**params) + + +def files(distribution_name): + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name): + """ + Return a list of requirements for the named package. + + :return: An iterator of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +def packages_distributions() -> Mapping[str, List[str]]: + """ + Return a mapping of top-level packages to their + distributions. + + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ + pkg_to_dist = collections.defaultdict(list) + for dist in distributions(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): + pkg_to_dist[pkg].append(dist.metadata['Name']) + return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _top_level_inferred(dist): + return { + f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + for f in always_iterable(dist.files) + if f.suffix == ".py" + } diff --git a/setuptools/_vendor/importlib_metadata/_adapters.py b/setuptools/_vendor/importlib_metadata/_adapters.py new file mode 100644 index 00000000..aa460d3e --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_adapters.py @@ -0,0 +1,68 @@ +import re +import textwrap +import email.message + +from ._text import FoldedCase + + +class Message(email.message.Message): + multiple_use_keys = set( + map( + FoldedCase, + [ + 'Classifier', + 'Obsoletes-Dist', + 'Platform', + 'Project-URL', + 'Provides-Dist', + 'Provides-Extra', + 'Requires-Dist', + 'Requires-External', + 'Supported-Platform', + 'Dynamic', + ], + ) + ) + """ + Keys that may be indicated multiple times per PEP 566. + """ + + def __new__(cls, orig: email.message.Message): + res = super().__new__(cls) + vars(res).update(vars(orig)) + return res + + def __init__(self, *args, **kwargs): + self._headers = self._repair_headers() + + # suppress spurious error from mypy + def __iter__(self): + return super().__iter__() + + def _repair_headers(self): + def redent(value): + "Correct for RFC822 indentation" + if not value or '\n' not in value: + return value + return textwrap.dedent(' ' * 8 + value) + + headers = [(key, redent(value)) for key, value in vars(self)['_headers']] + if self._payload: + headers.append(('Description', self.get_payload())) + return headers + + @property + def json(self): + """ + Convert PackageMetadata to a JSON-compatible format + per PEP 0566. 
+ """ + + def transform(key): + value = self.get_all(key) if key in self.multiple_use_keys else self[key] + if key == 'Keywords': + value = re.split(r'\s+', value) + tk = key.lower().replace('-', '_') + return tk, value + + return dict(map(transform, map(FoldedCase, self))) diff --git a/setuptools/_vendor/importlib_metadata/_collections.py b/setuptools/_vendor/importlib_metadata/_collections.py new file mode 100644 index 00000000..cf0954e1 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_collections.py @@ -0,0 +1,30 @@ +import collections + + +# from jaraco.collections 3.3 +class FreezableDefaultDict(collections.defaultdict): + """ + Often it is desirable to prevent the mutation of + a default dict after its initial construction, such + as to prevent mutation during iteration. + + >>> dd = FreezableDefaultDict(list) + >>> dd[0].append('1') + >>> dd.freeze() + >>> dd[1] + [] + >>> len(dd) + 1 + """ + + def __missing__(self, key): + return getattr(self, '_frozen', super().__missing__)(key) + + def freeze(self): + self._frozen = lambda key: self.default_factory() + + +class Pair(collections.namedtuple('Pair', 'name value')): + @classmethod + def parse(cls, text): + return cls(*map(str.strip, text.split("=", 1))) diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py new file mode 100644 index 00000000..ef3136f8 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_compat.py @@ -0,0 +1,71 @@ +import sys +import platform + + +__all__ = ['install', 'NullFinder', 'Protocol'] + + +try: + from typing import Protocol +except ImportError: # pragma: no cover + from ..typing_extensions import Protocol # type: ignore + + +def install(cls): + """ + Class decorator for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + + def matches(finder): + return getattr( + finder, '__module__', None + ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. + """ + + @staticmethod + def find_spec(*args, **kwargs): + return None + + # In Python 2, the import system requires finders + # to have a find_module() method, but this usage + # is deprecated in Python 3 in favor of find_spec(). + # For the purposes of this finder (i.e. being present + # on sys.meta_path but having no other import + # system functionality), the two methods are identical. + find_module = find_spec + + +def pypy_partial(val): + """ + Adjust for variable stacklevel on partial under PyPy. + + Workaround for #327. 
+ """ + is_pypy = platform.python_implementation() == 'PyPy' + return val + is_pypy diff --git a/setuptools/_vendor/importlib_metadata/_functools.py b/setuptools/_vendor/importlib_metadata/_functools.py new file mode 100644 index 00000000..71f66bd0 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_functools.py @@ -0,0 +1,104 @@ +import types +import functools + + +# from jaraco.functools 3.3 +def method_cache(method, cache_wrapper=None): + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. + wrapper.cache_clear = lambda: None + + return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/setuptools/_vendor/importlib_metadata/_itertools.py b/setuptools/_vendor/importlib_metadata/_itertools.py new file mode 100644 index 00000000..d4ca9b91 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_itertools.py @@ -0,0 +1,73 @@ +from itertools import filterfalse + + +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." 
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/setuptools/_vendor/importlib_metadata/_meta.py b/setuptools/_vendor/importlib_metadata/_meta.py new file mode 100644 index 00000000..37ee43e6 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_meta.py @@ -0,0 +1,48 @@ +from ._compat import Protocol +from typing import Any, Dict, Iterator, List, TypeVar, Union + + +_T = TypeVar("_T") + + +class PackageMetadata(Protocol): + def __len__(self) -> int: + ... # pragma: no cover + + def __contains__(self, item: str) -> bool: + ... # pragma: no cover + + def __getitem__(self, key: str) -> str: + ... # pragma: no cover + + def __iter__(self) -> Iterator[str]: + ... # pragma: no cover + + def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: + """ + Return all values associated with a possibly multi-valued key. + """ + + @property + def json(self) -> Dict[str, Union[str, List[str]]]: + """ + A JSON-compatible form of the metadata. + """ + + +class SimplePath(Protocol): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + + def joinpath(self) -> 'SimplePath': + ... # pragma: no cover + + def __truediv__(self) -> 'SimplePath': + ... # pragma: no cover + + def parent(self) -> 'SimplePath': + ... # pragma: no cover + + def read_text(self) -> str: + ... # pragma: no cover diff --git a/setuptools/_vendor/importlib_metadata/_text.py b/setuptools/_vendor/importlib_metadata/_text.py new file mode 100644 index 00000000..c88cfbb2 --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/_text.py @@ -0,0 +1,99 @@ +import re + +from ._functools import method_cache + + +# from jaraco.text 3.5 +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. 
+ + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use in_: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. + @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) diff --git a/setuptools/_vendor/importlib_metadata/py.typed b/setuptools/_vendor/importlib_metadata/py.typed new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_metadata/py.typed diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE new file mode 100644 index 00000000..378b991a --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Brett Cannon, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
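The egg-info-to-PEP 508 conversion performed by Distribution._convert_egg_info_reqs_to_simple_reqs in the vendored importlib_metadata/__init__.py above (each "extra" or marker section of requires.txt becomes an inline marker on the requirement) can be illustrated with a small sketch; this is not part of the patch, uses a private classmethod purely for demonstration, and the requires.txt content is made up:

from importlib_metadata import Distribution  # assumes the backport is importable as importlib_metadata

requires_txt = '''\
wheel>=0.32

[docs]
sphinx

[testing:python_version < "3.8"]
importlib_resources
'''

# Sections are flattened into standalone requirements with markers attached.
print(list(Distribution._deps_from_requires_text(requires_txt)))
# ['wheel>=0.32',
#  'sphinx; extra == "docs"',
#  'importlib_resources; (python_version < "3.8") and extra == "testing"']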
diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA new file mode 100644 index 00000000..cdb1e783 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/METADATA @@ -0,0 +1,86 @@ +Metadata-Version: 2.1 +Name: importlib-resources +Version: 5.4.0 +Summary: Read resources from Python packages +Home-page: https://github.com/python/importlib_resources +Author: Barry Warsaw +Author-email: barry@python.org +License: UNKNOWN +Project-URL: Documentation, https://importlib-resources.readthedocs.io/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_resources.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/importlib_resources + +.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg + :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest + :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + +``importlib_resources`` is a backport of Python standard library +`importlib.resources +<https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_ +module for older Pythons. + +The key goal of this module is to replace parts of `pkg_resources +<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a +solution in Python's stdlib that relies on well-defined APIs. This makes +reading resources included in packages easier, with more stable and consistent +semantics. + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_resources + - stdlib + * - 5.2 + - 3.11 + * - 5.0 + - 3.10 + * - 1.3 + - 3.9 + * - 0.5 (?) 
+ - 3.7 + + diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD new file mode 100644 index 00000000..7a68a2f2 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/RECORD @@ -0,0 +1,75 @@ +importlib_resources-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_resources-5.4.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
+importlib_resources-5.4.0.dist-info/METADATA,sha256=i5jH25IbM0Ls6u6UzSSCOa0c8hpDvePxqgnQwh2T5Io,3135
+importlib_resources-5.4.0.dist-info/RECORD,,
+importlib_resources-5.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources-5.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+importlib_resources-5.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources/__init__.py,sha256=zuA0lbRgtVVCcAztM0z5LuBiOCV9L_3qtI6mW2p5xAg,525
+importlib_resources/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-310.pyc,,
+importlib_resources/__pycache__/_common.cpython-310.pyc,,
+importlib_resources/__pycache__/_compat.cpython-310.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-310.pyc,,
+importlib_resources/__pycache__/_legacy.cpython-310.pyc,,
+importlib_resources/__pycache__/abc.cpython-310.pyc,,
+importlib_resources/__pycache__/readers.cpython-310.pyc,,
+importlib_resources/__pycache__/simple.cpython-310.pyc,,
+importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
+importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741
+importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704
+importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
+importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494
+importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566
+importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/_compat.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/update-zips.cpython-310.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-310.pyc,,
+importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260
+importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
+importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184
+importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565
+importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103
+importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408
+importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286
+importlib_resources/tests/test_resource.py,sha256=DqfLNc9kaN5obqxU8kn0sRUWMf9MygagrpfMV5-QfWg,8145
+importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415
+importlib_resources/tests/util.py,sha256=X1j-0C96pu3_tmtJuLhzfBfcfMenOphDLkxtCt5j7t4,5309
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-310.pyc,,
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
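The README above describes importlib_resources as a backport of importlib.resources; its public surface (files() and as_file() from _common.py, plus the legacy helpers) appears in the __init__.py and _common.py hunks below. A minimal, illustrative sketch, not part of this patch; "mypkg" and "data.txt" are hypothetical names:

import importlib_resources as ir  # assumes the backport is importable under this name

# files() returns a Traversable rooted at the package; resources are
# addressed with joinpath() (or "/") and read directly.
data = ir.files("mypkg").joinpath("data.txt").read_text(encoding="utf-8")

# as_file() materialises the resource as a real filesystem path (a temporary
# file when the package lives inside a zip), for APIs that only accept
# os.PathLike objects.
with ir.as_file(ir.files("mypkg") / "data.txt") as path:
    print(path.stat().st_size)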
diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..58ad1bd3 --- /dev/null +++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_resources diff --git a/setuptools/_vendor/importlib_resources/__init__.py b/setuptools/_vendor/importlib_resources/__init__.py new file mode 100644 index 00000000..34e3a995 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/__init__.py @@ -0,0 +1,36 @@ +"""Read resources contained within a package.""" + +from ._common import ( + as_file, + files, + Package, +) + +from ._legacy import ( + contents, + open_binary, + read_binary, + open_text, + read_text, + is_resource, + path, + Resource, +) + +from .abc import ResourceReader + + +__all__ = [ + 'Package', + 'Resource', + 'ResourceReader', + 'as_file', + 'contents', + 'files', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', +] diff --git a/setuptools/_vendor/importlib_resources/_adapters.py b/setuptools/_vendor/importlib_resources/_adapters.py new file mode 100644 index 00000000..ea363d86 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/_adapters.py @@ -0,0 +1,170 @@ +from contextlib import suppress +from io import TextIOWrapper + +from . import abc + + +class SpecLoaderAdapter: + """ + Adapt a package spec to adapt the underlying loader. + """ + + def __init__(self, spec, adapter=lambda spec: spec.loader): + self.spec = spec + self.loader = adapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class TraversableResourcesLoader: + """ + Adapt a loader to provide TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + def get_resource_reader(self, name): + return CompatibilityFiles(self.spec)._native() + + +def _io_wrapper(file, mode='r', *args, **kwargs): + if mode == 'r': + return TextIOWrapper(file, *args, **kwargs) + elif mode == 'rb': + return file + raise ValueError( + "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) + ) + + +class CompatibilityFiles: + """ + Adapter for an existing or non-existent resource reader + to provide a compatibility .files(). + """ + + class SpecPath(abc.Traversable): + """ + Path tied to a module spec. + Can be read and exposes the resource reader children. 
+ """ + + def __init__(self, spec, reader): + self._spec = spec + self._reader = reader + + def iterdir(self): + if not self._reader: + return iter(()) + return iter( + CompatibilityFiles.ChildPath(self._reader, path) + for path in self._reader.contents() + ) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + if not self._reader: + return CompatibilityFiles.OrphanPath(other) + return CompatibilityFiles.ChildPath(self._reader, other) + + @property + def name(self): + return self._spec.name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) + + class ChildPath(abc.Traversable): + """ + Path tied to a resource reader child. + Can be read but doesn't expose any meaningful children. + """ + + def __init__(self, reader, name): + self._reader = reader + self._name = name + + def iterdir(self): + return iter(()) + + def is_file(self): + return self._reader.is_resource(self.name) + + def is_dir(self): + return not self.is_file() + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(self.name, other) + + @property + def name(self): + return self._name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper( + self._reader.open_resource(self.name), mode, *args, **kwargs + ) + + class OrphanPath(abc.Traversable): + """ + Orphan path, not tied to a module spec or resource reader. + Can't be read and doesn't expose any meaningful children. + """ + + def __init__(self, *path_parts): + if len(path_parts) < 1: + raise ValueError('Need at least one path part to construct a path') + self._path = path_parts + + def iterdir(self): + return iter(()) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(*self._path, other) + + @property + def name(self): + return self._path[-1] + + def open(self, mode='r', *args, **kwargs): + raise FileNotFoundError("Can't open orphan path") + + def __init__(self, spec): + self.spec = spec + + @property + def _reader(self): + with suppress(AttributeError): + return self.spec.loader.get_resource_reader(self.spec.name) + + def _native(self): + """ + Return the native reader if it supports files(). + """ + reader = self._reader + return reader if hasattr(reader, 'files') else self + + def __getattr__(self, attr): + return getattr(self._reader, attr) + + def files(self): + return CompatibilityFiles.SpecPath(self.spec, self._reader) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + """ + return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/setuptools/_vendor/importlib_resources/_common.py b/setuptools/_vendor/importlib_resources/_common.py new file mode 100644 index 00000000..a12e2c75 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/_common.py @@ -0,0 +1,104 @@ +import os +import pathlib +import tempfile +import functools +import contextlib +import types +import importlib + +from typing import Union, Optional +from .abc import ResourceReader, Traversable + +from ._compat import wrap_spec + +Package = Union[types.ModuleType, str] + + +def files(package): + # type: (Package) -> Traversable + """ + Get a Traversable resource from a package + """ + return from_package(get_package(package)) + + +def get_resource_reader(package): + # type: (types.ModuleType) -> Optional[ResourceReader] + """ + Return the package's loader if it's a ResourceReader. 
+ """ + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore + if reader is None: + return None + return reader(spec.name) # type: ignore + + +def resolve(cand): + # type: (Package) -> types.ModuleType + return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) + + +def get_package(package): + # type: (Package) -> types.ModuleType + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. + """ + resolved = resolve(package) + if wrap_spec(resolved).submodule_search_locations is None: + raise TypeError(f'{package!r} is not a package') + return resolved + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = wrap_spec(package) + reader = spec.loader.get_resource_reader(spec.name) + return reader.files() + + +@contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. + fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + try: + os.write(fd, reader()) + finally: + os.close(fd) + del reader + yield pathlib.Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +@functools.singledispatch +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + return _tempfile(path.read_bytes, suffix=path.name) + + +@as_file.register(pathlib.Path) +@contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. + """ + yield path diff --git a/setuptools/_vendor/importlib_resources/_compat.py b/setuptools/_vendor/importlib_resources/_compat.py new file mode 100644 index 00000000..cb9fc820 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/_compat.py @@ -0,0 +1,98 @@ +# flake8: noqa + +import abc +import sys +import pathlib +from contextlib import suppress + +if sys.version_info >= (3, 10): + from zipfile import Path as ZipPath # type: ignore +else: + from ..zipp import Path as ZipPath # type: ignore + + +try: + from typing import runtime_checkable # type: ignore +except ImportError: + + def runtime_checkable(cls): # type: ignore + return cls + + +try: + from typing import Protocol # type: ignore +except ImportError: + Protocol = abc.ABC # type: ignore + + +class TraversableResourcesLoader: + """ + Adapt loaders to provide TraversableResources and other + compatibility. + + Used primarily for Python 3.9 and earlier where the native + loaders do not yet implement TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + @property + def path(self): + return self.spec.origin + + def get_resource_reader(self, name): + from . 
import readers, _adapters + + def _zip_reader(spec): + with suppress(AttributeError): + return readers.ZipReader(spec.loader, spec.name) + + def _namespace_reader(spec): + with suppress(AttributeError, ValueError): + return readers.NamespaceReader(spec.submodule_search_locations) + + def _available_reader(spec): + with suppress(AttributeError): + return spec.loader.get_resource_reader(spec.name) + + def _native_reader(spec): + reader = _available_reader(spec) + return reader if hasattr(reader, 'files') else None + + def _file_reader(spec): + try: + path = pathlib.Path(self.path) + except TypeError: + return None + if path.exists(): + return readers.FileReader(self) + + return ( + # native reader if it supplies 'files' + _native_reader(self.spec) + or + # local ZipReader if a zip module + _zip_reader(self.spec) + or + # local NamespaceReader if a namespace module + _namespace_reader(self.spec) + or + # local FileReader + _file_reader(self.spec) + # fallback - adapt the spec ResourceReader to TraversableReader + or _adapters.CompatibilityFiles(self.spec) + ) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + + Supersedes _adapters.wrap_spec to use TraversableResourcesLoader + from above for older Python compatibility (<3.10). + """ + from . import _adapters + + return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/setuptools/_vendor/importlib_resources/_itertools.py b/setuptools/_vendor/importlib_resources/_itertools.py new file mode 100644 index 00000000..cce05582 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/_itertools.py @@ -0,0 +1,35 @@ +from itertools import filterfalse + +from typing import ( + Callable, + Iterable, + Iterator, + Optional, + Set, + TypeVar, + Union, +) + +# Type and type variable definitions +_T = TypeVar('_T') +_U = TypeVar('_U') + + +def unique_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None +) -> Iterator[_T]: + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen: Set[Union[_T, _U]] = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element diff --git a/setuptools/_vendor/importlib_resources/_legacy.py b/setuptools/_vendor/importlib_resources/_legacy.py new file mode 100644 index 00000000..1d5d3f1f --- /dev/null +++ b/setuptools/_vendor/importlib_resources/_legacy.py @@ -0,0 +1,121 @@ +import functools +import os +import pathlib +import types +import warnings + +from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any + +from . import _common + +Package = Union[types.ModuleType, str] +Resource = str + + +def deprecated(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + f"{func.__name__} is deprecated. Use files() instead. " + "Refer to https://importlib-resources.readthedocs.io" + "/en/latest/using.html#migrating-from-legacy for migration advice.", + DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. 
+ """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError(f'{path!r} must be only a file name') + return file_name + + +@deprecated +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open('rb') + + +@deprecated +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + return (_common.files(package) / normalize_path(resource)).read_bytes() + + +@deprecated +def open_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open( + 'r', encoding=encoding, errors=errors + ) + + +@deprecated +def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@deprecated +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + return [path.name for path in _common.files(package).iterdir()] + + +@deprecated +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + resource = normalize_path(name) + return any( + traversable.name == resource and traversable.is_file() + for traversable in _common.files(package).iterdir() + ) + + +@deprecated +def path( + package: Package, + resource: Resource, +) -> ContextManager[pathlib.Path]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + return _common.as_file(_common.files(package) / normalize_path(resource)) diff --git a/setuptools/_vendor/importlib_resources/abc.py b/setuptools/_vendor/importlib_resources/abc.py new file mode 100644 index 00000000..d39dc1ad --- /dev/null +++ b/setuptools/_vendor/importlib_resources/abc.py @@ -0,0 +1,137 @@ +import abc +from typing import BinaryIO, Iterable, Text + +from ._compat import runtime_checkable, Protocol + + +class ResourceReader(metaclass=abc.ABCMeta): + """Abstract base class for loaders to provide resource reading support.""" + + @abc.abstractmethod + def open_resource(self, resource: Text) -> BinaryIO: + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. 
+ raise FileNotFoundError + + @abc.abstractmethod + def resource_path(self, resource: Text) -> Text: + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def is_resource(self, path: Text) -> bool: + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. + """ + raise FileNotFoundError + + @abc.abstractmethod + def contents(self) -> Iterable[str]: + """Return an iterable of entries in `package`.""" + raise FileNotFoundError + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + """ + + @abc.abstractmethod + def iterdir(self): + """ + Yield Traversable objects in self + """ + + def read_bytes(self): + """ + Read contents of self as bytes + """ + with self.open('rb') as strm: + return strm.read() + + def read_text(self, encoding=None): + """ + Read contents of self as text + """ + with self.open(encoding=encoding) as strm: + return strm.read() + + @abc.abstractmethod + def is_dir(self) -> bool: + """ + Return True if self is a directory + """ + + @abc.abstractmethod + def is_file(self) -> bool: + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, child): + """ + Return Traversable child in self + """ + + def __truediv__(self, child): + """ + Return Traversable child in self + """ + return self.joinpath(child) + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self) -> str: + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + """ + The required interface for providing traversable + resources. + """ + + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).is_file() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/setuptools/_vendor/importlib_resources/py.typed b/setuptools/_vendor/importlib_resources/py.typed new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/py.typed diff --git a/setuptools/_vendor/importlib_resources/readers.py b/setuptools/_vendor/importlib_resources/readers.py new file mode 100644 index 00000000..f1190ca4 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/readers.py @@ -0,0 +1,122 @@ +import collections +import pathlib +import operator + +from . 
import abc + +from ._itertools import unique_everseen +from ._compat import ZipPath + + +def remove_duplicates(items): + return iter(collections.OrderedDict.fromkeys(items)) + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = pathlib.Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.archive = loader.archive + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. + target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return ZipPath(self.archive, self.prefix) + + +class MultiplexedPath(abc.Traversable): + """ + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. + """ + + def __init__(self, *paths): + self._paths = list(map(pathlib.Path, remove_duplicates(paths))) + if not self._paths: + message = 'MultiplexedPath must contain at least one path' + raise FileNotFoundError(message) + if not all(path.is_dir() for path in self._paths): + raise NotADirectoryError('MultiplexedPath only supports directories') + + def iterdir(self): + files = (file for path in self._paths for file in path.iterdir()) + return unique_everseen(files, key=operator.attrgetter('name')) + + def read_bytes(self): + raise FileNotFoundError(f'{self} is not a file') + + def read_text(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + def is_dir(self): + return True + + def is_file(self): + return False + + def joinpath(self, child): + # first try to find child in current paths + for file in self.iterdir(): + if file.name == child: + return file + # if it does not exist, construct it with the first path + return self._paths[0] / child + + __truediv__ = joinpath + + def open(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + @property + def name(self): + return self._paths[0].name + + def __repr__(self): + paths = ', '.join(f"'{path}'" for path in self._paths) + return f'MultiplexedPath({paths})' + + +class NamespaceReader(abc.TraversableResources): + def __init__(self, namespace_path): + if 'NamespacePath' not in str(namespace_path): + raise ValueError('Invalid path') + self.path = MultiplexedPath(*list(namespace_path)) + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path diff --git a/setuptools/_vendor/importlib_resources/simple.py b/setuptools/_vendor/importlib_resources/simple.py new file mode 100644 index 00000000..da073cbd --- /dev/null +++ b/setuptools/_vendor/importlib_resources/simple.py @@ -0,0 +1,116 @@ +""" +Interface adapters for low-level readers. 
+""" + +import abc +import io +import itertools +from typing import BinaryIO, List + +from .abc import Traversable, TraversableResources + + +class SimpleReader(abc.ABC): + """ + The minimum, low-level interface required from a resource + provider. + """ + + @abc.abstractproperty + def package(self): + # type: () -> str + """ + The name of the package for which this reader loads resources. + """ + + @abc.abstractmethod + def children(self): + # type: () -> List['SimpleReader'] + """ + Obtain an iterable of SimpleReader for available + child containers (e.g. directories). + """ + + @abc.abstractmethod + def resources(self): + # type: () -> List[str] + """ + Obtain available named resources for this virtual package. + """ + + @abc.abstractmethod + def open_binary(self, resource): + # type: (str) -> BinaryIO + """ + Obtain a File-like for a named resource. + """ + + @property + def name(self): + return self.package.split('.')[-1] + + +class ResourceHandle(Traversable): + """ + Handle to a named resource in a ResourceReader. + """ + + def __init__(self, parent, name): + # type: (ResourceContainer, str) -> None + self.parent = parent + self.name = name # type: ignore + + def is_file(self): + return True + + def is_dir(self): + return False + + def open(self, mode='r', *args, **kwargs): + stream = self.parent.reader.open_binary(self.name) + if 'b' not in mode: + stream = io.TextIOWrapper(*args, **kwargs) + return stream + + def joinpath(self, name): + raise RuntimeError("Cannot traverse into a resource") + + +class ResourceContainer(Traversable): + """ + Traversable container for a package's resources via its reader. + """ + + def __init__(self, reader): + # type: (SimpleReader) -> None + self.reader = reader + + def is_dir(self): + return True + + def is_file(self): + return False + + def iterdir(self): + files = (ResourceHandle(self, name) for name in self.reader.resources) + dirs = map(ResourceContainer, self.reader.children()) + return itertools.chain(files, dirs) + + def open(self, *args, **kwargs): + raise IsADirectoryError() + + def joinpath(self, name): + return next( + traversable for traversable in self.iterdir() if traversable.name == name + ) + + +class TraversableReader(TraversableResources, SimpleReader): + """ + A TraversableResources based on SimpleReader. Resource providers + may derive from this class to provide the TraversableResources + interface by supplying the SimpleReader interface. 
+ """ + + def files(self): + return ResourceContainer(self) diff --git a/setuptools/_vendor/importlib_resources/tests/__init__.py b/setuptools/_vendor/importlib_resources/tests/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/_compat.py b/setuptools/_vendor/importlib_resources/tests/_compat.py new file mode 100644 index 00000000..4c99cffd --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/_compat.py @@ -0,0 +1,19 @@ +import os + + +try: + from test.support import import_helper # type: ignore +except ImportError: + # Python 3.9 and earlier + class import_helper: # type: ignore + from test.support import modules_setup, modules_cleanup + + +try: + # Python 3.10 + from test.support.os_helper import unlink +except ImportError: + from test.support import unlink as _unlink + + def unlink(target): + return _unlink(os.fspath(target)) diff --git a/setuptools/_vendor/importlib_resources/tests/data01/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/data01/binary.file b/setuptools/_vendor/importlib_resources/tests/data01/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/binary.file diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file diff --git a/setuptools/_vendor/importlib_resources/tests/data01/utf-16.file b/setuptools/_vendor/importlib_resources/tests/data01/utf-16.file Binary files differnew file mode 100644 index 00000000..2cb77229 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/utf-16.file diff --git a/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file b/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! 
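(Aside, not part of the patch.) The test data added above ('Hello, UTF-8 world!' and the four-byte binary.file) is exercised through the files()/as_file() API re-exported by the vendored package's __init__.py. A usage sketch, assuming the standalone importlib_resources distribution and its tests data are importable; inside setuptools the same modules live under setuptools._vendor.importlib_resources:

import importlib_resources as resources

data = resources.files('importlib_resources.tests.data01')
# Traversable objects can be read directly...
assert (data / 'utf-8.file').read_text(encoding='utf-8') == 'Hello, UTF-8 world!\n'

# ...and as_file() yields a real filesystem path, extracting to a
# temporary file only when the resource is not already on disk.
with resources.as_file(data / 'binary.file') as path:
    assert path.read_bytes() == b'\x00\x01\x02\x03'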
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data02/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt b/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt new file mode 100644 index 00000000..61a813e4 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt @@ -0,0 +1 @@ +one resource diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt b/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt new file mode 100644 index 00000000..a80ce46e --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt @@ -0,0 +1 @@ +two resource diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file Binary files differnew file mode 100644 index 00000000..eaf36c1d --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file Binary files differnew file mode 100644 index 00000000..2cb77229 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! diff --git a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py new file mode 100644 index 00000000..d92c7c56 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py @@ -0,0 +1,102 @@ +import io +import unittest + +import importlib_resources as resources + +from importlib_resources._adapters import ( + CompatibilityFiles, + wrap_spec, +) + +from . 
import util + + +class CompatibilityFilesTests(unittest.TestCase): + @property + def package(self): + bytes_data = io.BytesIO(b'Hello, world!') + return util.create_package( + file=bytes_data, + path='some_path', + contents=('a', 'b', 'c'), + ) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_iter(self): + self.assertEqual( + sorted(path.name for path in self.files.iterdir()), + ['a', 'b', 'c'], + ) + + def test_child_path_iter(self): + self.assertEqual(list((self.files / 'a').iterdir()), []) + + def test_orphan_path_iter(self): + self.assertEqual(list((self.files / 'a' / 'a').iterdir()), []) + self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), []) + + def test_spec_path_is(self): + self.assertFalse(self.files.is_file()) + self.assertFalse(self.files.is_dir()) + + def test_child_path_is(self): + self.assertTrue((self.files / 'a').is_file()) + self.assertFalse((self.files / 'a').is_dir()) + + def test_orphan_path_is(self): + self.assertFalse((self.files / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a').is_dir()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir()) + + def test_spec_path_name(self): + self.assertEqual(self.files.name, 'testingpackage') + + def test_child_path_name(self): + self.assertEqual((self.files / 'a').name, 'a') + + def test_orphan_path_name(self): + self.assertEqual((self.files / 'a' / 'b').name, 'b') + self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c') + + def test_spec_path_open(self): + self.assertEqual(self.files.read_bytes(), b'Hello, world!') + self.assertEqual(self.files.read_text(), 'Hello, world!') + + def test_child_path_open(self): + self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!') + self.assertEqual((self.files / 'a').read_text(), 'Hello, world!') + + def test_orphan_path_open(self): + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b').read_bytes() + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b' / 'c').read_bytes() + + def test_open_invalid_mode(self): + with self.assertRaises(ValueError): + self.files.open('0') + + def test_orphan_path_invalid(self): + with self.assertRaises(ValueError): + CompatibilityFiles.OrphanPath() + + def test_wrap_spec(self): + spec = wrap_spec(self.package) + self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles) + + +class CompatibilityFilesNoReaderTests(unittest.TestCase): + @property + def package(self): + return util.create_package_from_loader(None) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_joinpath(self): + self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath) diff --git a/setuptools/_vendor/importlib_resources/tests/test_contents.py b/setuptools/_vendor/importlib_resources/tests/test_contents.py new file mode 100644 index 00000000..525568e8 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_contents.py @@ -0,0 +1,43 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . 
import util + + +class ContentsTests: + expected = { + '__init__.py', + 'binary.file', + 'subdirectory', + 'utf-16.file', + 'utf-8.file', + } + + def test_contents(self): + contents = {path.name for path in resources.files(self.data).iterdir()} + assert self.expected <= contents + + +class ContentsDiskTests(ContentsTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): + pass + + +class ContentsNamespaceTests(ContentsTests, unittest.TestCase): + expected = { + # no __init__ because of namespace design + # no subdirectory as incidental difference in fixture + 'binary.file', + 'utf-16.file', + 'utf-8.file', + } + + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py new file mode 100644 index 00000000..2676b49e --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_files.py @@ -0,0 +1,46 @@ +import typing +import unittest + +import importlib_resources as resources +from importlib_resources.abc import Traversable +from . import data01 +from . import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text(encoding='utf-8') + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +class OpenNamespaceTests(FilesTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_open.py b/setuptools/_vendor/importlib_resources/tests/test_open.py new file mode 100644 index 00000000..87b42c3d --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_open.py @@ -0,0 +1,81 @@ +import unittest + +import importlib_resources as resources +from . import data01 +from . 
import util + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open('rb'): + pass + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open(): + pass + + +class OpenTests: + def test_open_binary(self): + target = resources.files(self.data) / 'binary.file' + with target.open('rb') as fp: + result = fp.read() + self.assertEqual(result, b'\x00\x01\x02\x03') + + def test_open_text_default_encoding(self): + target = resources.files(self.data) / 'utf-8.file' + with target.open() as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_open_text_given_encoding(self): + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-16', errors='strict') as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_open_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-8', errors='strict') as fp: + self.assertRaises(UnicodeError, fp.read) + with target.open(encoding='utf-8', errors='ignore') as fp: + result = fp.read() + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + def test_open_binary_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open, 'rb') + + def test_open_text_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open) + + +class OpenDiskTests(OpenTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + +class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_path.py b/setuptools/_vendor/importlib_resources/tests/test_path.py new file mode 100644 index 00000000..4f4d3943 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_path.py @@ -0,0 +1,64 @@ +import io +import unittest + +import importlib_resources as resources +from . import data01 +from . import util + + +class CommonTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + with resources.as_file(resources.files(package).joinpath(path)): + pass + + +class PathTests: + def test_reading(self): + # Path should be readable. + # Test also implicitly verifies the returned object is a pathlib.Path + # instance. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + # pathlib.Path.read_text() was introduced in Python 3.5. + with path.open('r', encoding='utf-8') as file: + text = file.read() + self.assertEqual('Hello, UTF-8 world!\n', text) + + +class PathDiskTests(PathTests, unittest.TestCase): + data = data01 + + def test_natural_path(self): + """ + Guarantee the internal implementation detail that + file-system-backed resources do not get the tempdir + treatment. 
+ """ + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + assert 'data' in str(path) + + +class PathMemoryTests(PathTests, unittest.TestCase): + def setUp(self): + file = io.BytesIO(b'Hello, UTF-8 world!\n') + self.addCleanup(file.close) + self.data = util.create_package( + file=file, path=FileNotFoundError("package exists only in memory") + ) + self.data.__spec__.origin = None + self.data.__spec__.has_location = False + + +class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): + def test_remove_in_context_manager(self): + # It is not an error if the file that was temporarily stashed on the + # file system is removed inside the `with` stanza. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + path.unlink() + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_read.py b/setuptools/_vendor/importlib_resources/tests/test_read.py new file mode 100644 index 00000000..41dd6db5 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_read.py @@ -0,0 +1,76 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . import util +from importlib import import_module + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_bytes() + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_text() + + +class ReadTests: + def test_read_bytes(self): + result = resources.files(self.data).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_text_default_encoding(self): + result = resources.files(self.data).joinpath('utf-8.file').read_text() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_read_text_given_encoding(self): + result = ( + resources.files(self.data) + .joinpath('utf-16.file') + .read_text(encoding='utf-16') + ) + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_read_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + self.assertRaises(UnicodeError, target.read_text, encoding='utf-8') + result = target.read_text(encoding='utf-8', errors='ignore') + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + +class ReadDiskTests(ReadTests, unittest.TestCase): + data = data01 + + +class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): + def test_read_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + result = resources.files(submodule).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_submodule_resource_by_name(self): + result = ( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .read_bytes() + ) + self.assertEqual(result, b'\0\1\2\3') + + +class ReadNamespaceTests(ReadTests, unittest.TestCase): + def setUp(self): + from . 
import namespacedata01 + + self.data = namespacedata01 + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py new file mode 100644 index 00000000..16841a50 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_reader.py @@ -0,0 +1,128 @@ +import os.path +import sys +import pathlib +import unittest + +from importlib import import_module +from importlib_resources.readers import MultiplexedPath, NamespaceReader + + +class MultiplexedPathTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + path = pathlib.Path(__file__).parent / 'namespacedata01' + cls.folder = str(path) + + def test_init_no_paths(self): + with self.assertRaises(FileNotFoundError): + MultiplexedPath() + + def test_init_file(self): + with self.assertRaises(NotADirectoryError): + MultiplexedPath(os.path.join(self.folder, 'binary.file')) + + def test_iterdir(self): + contents = {path.name for path in MultiplexedPath(self.folder).iterdir()} + try: + contents.remove('__pycache__') + except (KeyError, ValueError): + pass + self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'}) + + def test_iterdir_duplicate(self): + data01 = os.path.abspath(os.path.join(__file__, '..', 'data01')) + contents = { + path.name for path in MultiplexedPath(self.folder, data01).iterdir() + } + for remove in ('__pycache__', '__init__.pyc'): + try: + contents.remove(remove) + except (KeyError, ValueError): + pass + self.assertEqual( + contents, + {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'}, + ) + + def test_is_dir(self): + self.assertEqual(MultiplexedPath(self.folder).is_dir(), True) + + def test_is_file(self): + self.assertEqual(MultiplexedPath(self.folder).is_file(), False) + + def test_open_file(self): + path = MultiplexedPath(self.folder) + with self.assertRaises(FileNotFoundError): + path.read_bytes() + with self.assertRaises(FileNotFoundError): + path.read_text() + with self.assertRaises(FileNotFoundError): + path.open() + + def test_join_path(self): + prefix = os.path.abspath(os.path.join(__file__, '..')) + data01 = os.path.join(prefix, 'data01') + path = MultiplexedPath(self.folder, data01) + self.assertEqual( + str(path.joinpath('binary.file'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'binary.file'), + ) + self.assertEqual( + str(path.joinpath('subdirectory'))[len(prefix) + 1 :], + os.path.join('data01', 'subdirectory'), + ) + self.assertEqual( + str(path.joinpath('imaginary'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'imaginary'), + ) + + def test_repr(self): + self.assertEqual( + repr(MultiplexedPath(self.folder)), + f"MultiplexedPath('{self.folder}')", + ) + + def test_name(self): + self.assertEqual( + MultiplexedPath(self.folder).name, + os.path.basename(self.folder), + ) + + +class NamespaceReaderTest(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_init_error(self): + with self.assertRaises(ValueError): + NamespaceReader(['path1', 'path2']) + + def test_resource_path(self): + namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertEqual( + reader.resource_path('binary.file'), 
os.path.join(root, 'binary.file') + ) + self.assertEqual( + reader.resource_path('imaginary'), os.path.join(root, 'imaginary') + ) + + def test_files(self): + namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertIsInstance(reader.files(), MultiplexedPath) + self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')") + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py new file mode 100644 index 00000000..5affd8b0 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/test_resource.py @@ -0,0 +1,252 @@ +import sys +import unittest +import importlib_resources as resources +import uuid +import pathlib + +from . import data01 +from . import zipdata01, zipdata02 +from . import util +from importlib import import_module +from ._compat import import_helper, unlink + + +class ResourceTests: + # Subclasses are expected to set the `data` attribute. + + def test_is_file_exists(self): + target = resources.files(self.data) / 'binary.file' + self.assertTrue(target.is_file()) + + def test_is_file_missing(self): + target = resources.files(self.data) / 'not-a-file' + self.assertFalse(target.is_file()) + + def test_is_dir(self): + target = resources.files(self.data) / 'subdirectory' + self.assertFalse(target.is_file()) + self.assertTrue(target.is_dir()) + + +class ResourceDiskTests(ResourceTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): + pass + + +def names(traversable): + return {item.name for item in traversable.iterdir()} + + +class ResourceLoaderTests(unittest.TestCase): + def test_resource_contents(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) + + def test_is_file(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('B').is_file()) + + def test_is_dir(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('D').is_dir()) + + def test_resource_missing(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertFalse(resources.files(package).joinpath('Z').is_file()) + + +class ResourceCornerCaseTests(unittest.TestCase): + def test_package_has_no_reader_fallback(self): + # Test odd ball packages which: + # 1. Do not have a ResourceReader as a loader + # 2. Are not on the file system + # 3. Are not in a zip file + module = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + # Give the module a dummy loader. + module.__loader__ = object() + # Give the module a dummy origin. 
+ module.__file__ = '/path/which/shall/not/be/named' + module.__spec__.loader = module.__loader__ + module.__spec__.origin = module.__file__ + self.assertFalse(resources.files(module).joinpath('A').is_file()) + + +class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata01 # type: ignore + + def test_is_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .is_file() + ) + + def test_submodule_contents(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertEqual( + names(resources.files(submodule)), {'__init__.py', 'binary.file'} + ) + + def test_submodule_contents_by_name(self): + self.assertEqual( + names(resources.files('ziptestdata.subdirectory')), + {'__init__.py', 'binary.file'}, + ) + + +class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata02 # type: ignore + + def test_unrelated_contents(self): + """ + Test thata zip with two unrelated subpackages return + distinct resources. Ref python/importlib_resources#44. + """ + self.assertEqual( + names(resources.files('ziptestdata.one')), + {'__init__.py', 'resource1.txt'}, + ) + self.assertEqual( + names(resources.files('ziptestdata.two')), + {'__init__.py', 'resource2.txt'}, + ) + + +class DeletingZipsTest(unittest.TestCase): + """Having accessed resources in a zip file should not keep an open + reference to the zip. + """ + + ZIP_MODULE = zipdata01 + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + data_path = pathlib.Path(self.ZIP_MODULE.__file__) + data_dir = data_path.parent + self.source_zip_path = data_dir / 'ziptestdata.zip' + self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute() + self.zip_path.write_bytes(self.source_zip_path.read_bytes()) + sys.path.append(str(self.zip_path)) + self.data = import_module('ziptestdata') + + def tearDown(self): + try: + sys.path.remove(str(self.zip_path)) + except ValueError: + pass + + try: + del sys.path_importer_cache[str(self.zip_path)] + del sys.modules[self.data.__name__] + except KeyError: + pass + + try: + unlink(self.zip_path) + except OSError: + # If the test fails, this will probably fail too + pass + + def test_iterdir_does_not_keep_open(self): + c = [item.name for item in resources.files('ziptestdata').iterdir()] + self.zip_path.unlink() + del c + + def test_is_file_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').is_file() + self.zip_path.unlink() + del c + + def test_is_file_failure_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('not-present').is_file() + self.zip_path.unlink() + del c + + @unittest.skip("Desired but not supported.") + def test_as_file_does_not_keep_open(self): # pragma: no cover + c = resources.as_file(resources.files('ziptestdata') / 'binary.file') + self.zip_path.unlink() + del c + + def test_entered_path_does_not_keep_open(self): + # This is what certifi does on import to make its bundle + # available for the process duration. 
+ c = resources.as_file( + resources.files('ziptestdata') / 'binary.file' + ).__enter__() + self.zip_path.unlink() + del c + + def test_read_binary_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').read_bytes() + self.zip_path.unlink() + del c + + def test_read_text_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('utf-8.file').read_text() + self.zip_path.unlink() + del c + + +class ResourceFromNamespaceTest01(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_is_submodule_resource(self): + self.assertTrue( + resources.files(import_module('namespacedata01')) + .joinpath('binary.file') + .is_file() + ) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('namespacedata01').joinpath('binary.file').is_file() + ) + + def test_submodule_contents(self): + contents = names(resources.files(import_module('namespacedata01'))) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + def test_submodule_contents_by_name(self): + contents = names(resources.files('namespacedata01')) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/setuptools/_vendor/importlib_resources/tests/update-zips.py b/setuptools/_vendor/importlib_resources/tests/update-zips.py new file mode 100644 index 00000000..9ef0224c --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/update-zips.py @@ -0,0 +1,53 @@ +""" +Generate the zip test data files. + +Run to build the tests/zipdataNN/ziptestdata.zip files from +files in tests/dataNN. + +Replaces the file with the working copy, but does commit anything +to the source repo. +""" + +import contextlib +import os +import pathlib +import zipfile + + +def main(): + """ + >>> from unittest import mock + >>> monkeypatch = getfixture('monkeypatch') + >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock()) + >>> print(); main() # print workaround for bpo-32509 + <BLANKLINE> + ...data01... -> ziptestdata/... + ... + ...data02... -> ziptestdata/... + ... + """ + suffixes = '01', '02' + tuple(map(generate, suffixes)) + + +def generate(suffix): + root = pathlib.Path(__file__).parent.relative_to(os.getcwd()) + zfpath = root / f'zipdata{suffix}/ziptestdata.zip' + with zipfile.ZipFile(zfpath, 'w') as zf: + for src, rel in walk(root / f'data{suffix}'): + dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix()) + print(src, '->', dst) + zf.write(src, dst) + + +def walk(datapath): + for dirpath, dirnames, filenames in os.walk(datapath): + with contextlib.suppress(KeyError): + dirnames.remove('__pycache__') + for filename in filenames: + res = pathlib.Path(dirpath) / filename + rel = res.relative_to(datapath) + yield res, rel + + +__name__ == '__main__' and main() diff --git a/setuptools/_vendor/importlib_resources/tests/util.py b/setuptools/_vendor/importlib_resources/tests/util.py new file mode 100644 index 00000000..c6d83e4b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/util.py @@ -0,0 +1,178 @@ +import abc +import importlib +import io +import sys +import types +from pathlib import Path, PurePath + +from . import data01 +from . 
import zipdata01 +from ..abc import ResourceReader +from ._compat import import_helper + + +from importlib.machinery import ModuleSpec + + +class Reader(ResourceReader): + def __init__(self, **kwargs): + vars(self).update(kwargs) + + def get_resource_reader(self, package): + return self + + def open_resource(self, path): + self._path = path + if isinstance(self.file, Exception): + raise self.file + return self.file + + def resource_path(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + return self.path + + def is_resource(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + + def part(entry): + return entry.split('/') + + return any( + len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents) + ) + + def contents(self): + if isinstance(self.path, Exception): + raise self.path + yield from self._contents + + +def create_package_from_loader(loader, is_package=True): + name = 'testingpackage' + module = types.ModuleType(name) + spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package) + module.__spec__ = spec + module.__loader__ = loader + return module + + +def create_package(file=None, path=None, is_package=True, contents=()): + return create_package_from_loader( + Reader(file=file, path=path, _contents=contents), + is_package, + ) + + +class CommonTests(metaclass=abc.ABCMeta): + """ + Tests shared by test_open, test_path, and test_read. + """ + + @abc.abstractmethod + def execute(self, package, path): + """ + Call the pertinent legacy API function (e.g. open_text, path) + on package and path. + """ + + def test_package_name(self): + # Passing in the package name should succeed. + self.execute(data01.__name__, 'utf-8.file') + + def test_package_object(self): + # Passing in the package itself should succeed. + self.execute(data01, 'utf-8.file') + + def test_string_path(self): + # Passing in a string for the path should succeed. + path = 'utf-8.file' + self.execute(data01, path) + + def test_pathlib_path(self): + # Passing in a pathlib.PurePath object for the path should succeed. + path = PurePath('utf-8.file') + self.execute(data01, path) + + def test_importing_module_as_side_effect(self): + # The anchor package can already be imported. + del sys.modules[data01.__name__] + self.execute(data01.__name__, 'utf-8.file') + + def test_non_package_by_name(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + self.execute(__name__, 'utf-8.file') + + def test_non_package_by_package(self): + # The anchor package cannot be a module. + with self.assertRaises(TypeError): + module = sys.modules['importlib_resources.tests.util'] + self.execute(module, 'utf-8.file') + + def test_missing_path(self): + # Attempting to open or read or request the path for a + # non-existent path should succeed if open_resource + # can return a viable data stream. + bytes_data = io.BytesIO(b'Hello, world!') + package = create_package(file=bytes_data, path=FileNotFoundError()) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_extant_path(self): + # Attempting to open or read or request the path when the + # path does exist should still succeed. Does not assert + # anything about the result. 
+ bytes_data = io.BytesIO(b'Hello, world!') + # any path that exists + path = __file__ + package = create_package(file=bytes_data, path=path) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_useless_loader(self): + package = create_package(file=FileNotFoundError(), path=FileNotFoundError()) + with self.assertRaises(FileNotFoundError): + self.execute(package, 'utf-8.file') + + +class ZipSetupBase: + ZIP_MODULE = None + + @classmethod + def setUpClass(cls): + data_path = Path(cls.ZIP_MODULE.__file__) + data_dir = data_path.parent + cls._zip_path = str(data_dir / 'ziptestdata.zip') + sys.path.append(cls._zip_path) + cls.data = importlib.import_module('ziptestdata') + + @classmethod + def tearDownClass(cls): + try: + sys.path.remove(cls._zip_path) + except ValueError: + pass + + try: + del sys.path_importer_cache[cls._zip_path] + del sys.modules[cls.data.__name__] + except KeyError: + pass + + try: + del cls.data + del cls._zip_path + except AttributeError: + pass + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + +class ZipSetup(ZipSetupBase): + ZIP_MODULE = zipdata01 # type: ignore diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py b/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip b/setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip Binary files differnew file mode 100644 index 00000000..9a3bb073 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py b/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip b/setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip Binary files differnew file mode 100644 index 00000000..d63ff512 --- /dev/null +++ b/setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/LICENSE b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. 
Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA new file mode 100644 index 00000000..908711b7 --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/METADATA @@ -0,0 +1,52 @@ +Metadata-Version: 2.1 +Name: jaraco.context +Version: 4.1.1 +Summary: Context managers by jaraco +Home-page: https://github.com/jaraco/jaraco.context +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.context.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.context + +.. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest + :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest + +.. 
image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD new file mode 100644 index 00000000..f40d48c7 --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.context-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.context-4.1.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.context-4.1.1.dist-info/METADATA,sha256=bvqDGCk6Z7TkohUqr5XZm19SbF9mVxrtXjN6uF_BAMQ,2031
+jaraco.context-4.1.1.dist-info/RECORD,,
+jaraco.context-4.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.context-4.1.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/context.cpython-310.pyc,,
+jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420
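The RECORD above lists a single vendored module, jaraco/context.py, whose full source is added later in this diff. A minimal usage sketch of two of its pieces, ExceptionTrap and suppress, assuming the module is importable as plain jaraco.context (setuptools-internal code reaches it as setuptools.extern.jaraco.context):

    # Minimal sketch only; `jaraco.context` stands in for the vendored copy,
    # which setuptools-internal code imports via `setuptools.extern.jaraco.context`.
    from jaraco.context import ExceptionTrap, suppress

    # ExceptionTrap records whether the guarded block raised, without re-raising.
    with ExceptionTrap(ValueError) as trap:
        int("not a number")
    print(bool(trap))   # True  -> a ValueError was trapped
    print(trap.type)    # <class 'ValueError'>

    # suppress behaves like contextlib.suppress but also works as a decorator.
    @suppress(KeyError)
    def lookup(mapping, key):
        return mapping[key]

    print(lookup({}, "missing"))  # None -> the KeyError was swallowed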
diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA new file mode 100644 index 00000000..12dfbdd0 --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.1 +Name: jaraco.functools +Version: 3.5.0 +Summary: Functools like those found in stdlib +Home-page: https://github.com/jaraco/jaraco.functools +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: more-itertools +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.classes ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg + +.. image:: https://img.shields.io/travis/jaraco/jaraco.functools/master.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.functools + +.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest + :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + +Additional functools in the spirit of stdlib's functools. + + diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD new file mode 100644 index 00000000..fbda3d1f --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/RECORD @@ -0,0 +1,8 @@ +jaraco.functools-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.functools-3.5.0.dist-info/METADATA,sha256=cE9C7u9bo_GjLAuw4nML67a25kUaPDiHn4j03lG4jd0,2276
+jaraco.functools-3.5.0.dist-info/RECORD,,
+jaraco.functools-3.5.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.functools-3.5.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/functools.cpython-310.pyc,,
+jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494
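As with jaraco.context, this RECORD covers one vendored module, jaraco/functools.py, added later in this diff. A short, hedged sketch of two of its helpers, once and method_cache, again using the plain jaraco.functools import name as a stand-in:

    # Minimal sketch only; inside setuptools the module is reached as
    # `setuptools.extern.jaraco.functools`.
    from jaraco.functools import method_cache, once

    @once
    def load_config():
        # `once` memoizes the first result; later calls skip the body entirely.
        print("loading...")
        return {"answer": 42}

    load_config()   # prints "loading..." and returns {'answer': 42}
    load_config()   # returns the cached dict; nothing printed

    class Repo:
        @method_cache
        def head_commit(self):
            # Cached per instance, unlike a bare functools.lru_cache on the method.
            print("querying...")
            return "abc123"

    repo = Repo()
    repo.head_commit()  # prints "querying..."
    repo.head_commit()  # served from the per-instance cache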
diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA new file mode 100644 index 00000000..615a50a4 --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA @@ -0,0 +1,55 @@ +Metadata-Version: 2.1 +Name: jaraco.text +Version: 3.7.0 +Summary: Module for text manipulation +Home-page: https://github.com/jaraco/jaraco.text +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.6 +License-File: LICENSE +Requires-Dist: jaraco.functools +Requires-Dist: jaraco.context (>=4.1) +Requires-Dist: importlib-resources ; python_version < "3.9" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/jaraco.text.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/jaraco.text + +.. image:: https://github.com/jaraco/jaraco.text/workflows/tests/badge.svg + :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest + :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD new file mode 100644 index 00000000..916ad7d3 --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +jaraco.text-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.text-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.text-3.7.0.dist-info/METADATA,sha256=5mcR1dY0cJNrM-VIkAFkpjOgvgzmq6nM1GfD0gwTIhs,2136
+jaraco.text-3.7.0.dist-info/RECORD,,
+jaraco.text-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
+jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
+jaraco/text/__pycache__/__init__.cpython-310.pyc,,
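The jaraco.text RECORD above also ships a data file ("Lorem ipsum.txt", used by its doctests) alongside the code, both added later in this diff. A brief, hedged sketch of a few of its helpers (yield_lines, drop_comment, join_continuation, WordSet), using the plain jaraco.text import name as a stand-in:

    # Minimal sketch only; inside setuptools the package is reached as
    # `setuptools.extern.jaraco.text`.
    from jaraco.text import WordSet, drop_comment, join_continuation, yield_lines

    text = """
    # build requirements
    wheel
    setuptools>=40.8 # lower bound
    """

    print(list(yield_lines(text)))
    # ['wheel', 'setuptools>=40.8 # lower bound']  (blank and "#" lines dropped)

    cleaned = (drop_comment(line) for line in yield_lines(text))
    print(list(join_continuation(cleaned)))
    # ['wheel', 'setuptools>=40.8']

    print(WordSet.parse("myABCClass").underscore_separated())  # my_ABC_Class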
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt new file mode 100644 index 00000000..f6205a5f --- /dev/null +++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jaraco diff --git a/setuptools/_vendor/jaraco/__init__.py b/setuptools/_vendor/jaraco/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/jaraco/__init__.py diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py new file mode 100644 index 00000000..87a4e3dc --- /dev/null +++ b/setuptools/_vendor/jaraco/context.py @@ -0,0 +1,213 @@ +import os +import subprocess +import contextlib +import functools +import tempfile +import shutil +import operator + + +@contextlib.contextmanager +def pushd(dir): + orig = os.getcwd() + os.chdir(dir) + try: + yield dir + finally: + os.chdir(orig) + + +@contextlib.contextmanager +def tarball_context(url, target_dir=None, runner=None, pushd=pushd): + """ + Get a tarball, extract it, change to that directory, yield, then + clean up. + `runner` is the function to invoke commands. + `pushd` is a context manager for changing the directory. + """ + if target_dir is None: + target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') + if runner is None: + runner = functools.partial(subprocess.check_call, shell=True) + # In the tar command, use --strip-components=1 to strip the first path and + # then + # use -C to cause the files to be extracted to {target_dir}. This ensures + # that we always know where the files were extracted. + runner('mkdir {target_dir}'.format(**vars())) + try: + getter = 'wget {url} -O -' + extract = 'tar x{compression} --strip-components=1 -C {target_dir}' + cmd = ' | '.join((getter, extract)) + runner(cmd.format(compression=infer_compression(url), **vars())) + with pushd(target_dir): + yield target_dir + finally: + runner('rm -Rf {target_dir}'.format(**vars())) + + +def infer_compression(url): + """ + Given a URL or filename, infer the compression code for tar. + """ + # cheat and just assume it's the last two characters + compression_indicator = url[-2:] + mapping = dict(gz='z', bz='j', xz='J') + # Assume 'z' (gzip) if no match + return mapping.get(compression_indicator, 'z') + + +@contextlib.contextmanager +def temp_dir(remover=shutil.rmtree): + """ + Create a temporary directory context. Pass a custom remover + to override the removal behavior. + """ + temp_dir = tempfile.mkdtemp() + try: + yield temp_dir + finally: + remover(temp_dir) + + +@contextlib.contextmanager +def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): + """ + Check out the repo indicated by url. + + If dest_ctx is supplied, it should be a context manager + to yield the target directory for the check out. 
+ """ + exe = 'git' if 'git' in url else 'hg' + with dest_ctx() as repo_dir: + cmd = [exe, 'clone', url, repo_dir] + if branch: + cmd.extend(['--branch', branch]) + devnull = open(os.path.devnull, 'w') + stdout = devnull if quiet else None + subprocess.check_call(cmd, stdout=stdout) + yield repo_dir + + +@contextlib.contextmanager +def null(): + yield + + +class ExceptionTrap: + """ + A context manager that will catch certain exceptions and provide an + indication they occurred. + + >>> with ExceptionTrap() as trap: + ... raise Exception() + >>> bool(trap) + True + + >>> with ExceptionTrap() as trap: + ... pass + >>> bool(trap) + False + + >>> with ExceptionTrap(ValueError) as trap: + ... raise ValueError("1 + 1 is not 3") + >>> bool(trap) + True + + >>> with ExceptionTrap(ValueError) as trap: + ... raise Exception() + Traceback (most recent call last): + ... + Exception + + >>> bool(trap) + False + """ + + exc_info = None, None, None + + def __init__(self, exceptions=(Exception,)): + self.exceptions = exceptions + + def __enter__(self): + return self + + @property + def type(self): + return self.exc_info[0] + + @property + def value(self): + return self.exc_info[1] + + @property + def tb(self): + return self.exc_info[2] + + def __exit__(self, *exc_info): + type = exc_info[0] + matches = type and issubclass(type, self.exceptions) + if matches: + self.exc_info = exc_info + return matches + + def __bool__(self): + return bool(self.type) + + def raises(self, func, *, _test=bool): + """ + Wrap func and replace the result with the truth + value of the trap (True if an exception occurred). + + First, give the decorator an alias to support Python 3.8 + Syntax. + + >>> raises = ExceptionTrap(ValueError).raises + + Now decorate a function that always fails. + + >>> @raises + ... def fail(): + ... raise ValueError('failed') + >>> fail() + True + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with ExceptionTrap(self.exceptions) as trap: + func(*args, **kwargs) + return _test(trap) + + return wrapper + + def passes(self, func): + """ + Wrap func and replace the result with the truth + value of the trap (True if no exception). + + First, give the decorator an alias to support Python 3.8 + Syntax. + + >>> passes = ExceptionTrap(ValueError).passes + + Now decorate a function that always fails. + + >>> @passes + ... def fail(): + ... raise ValueError('failed') + + >>> fail() + False + """ + return self.raises(func, _test=operator.not_) + + +class suppress(contextlib.suppress, contextlib.ContextDecorator): + """ + A version of contextlib.suppress with decorator support. + + >>> @suppress(KeyError) + ... def key_error(): + ... {}[''] + >>> key_error() + """ diff --git a/setuptools/_vendor/jaraco/functools.py b/setuptools/_vendor/jaraco/functools.py new file mode 100644 index 00000000..bbd8b29f --- /dev/null +++ b/setuptools/_vendor/jaraco/functools.py @@ -0,0 +1,525 @@ +import functools +import time +import inspect +import collections +import types +import itertools + +import setuptools.extern.more_itertools + +from typing import Callable, TypeVar + + +CallableT = TypeVar("CallableT", bound=Callable[..., object]) + + +def compose(*funcs): + """ + Compose any number of unary functions into a single unary function. + + >>> import textwrap + >>> expected = str.strip(textwrap.dedent(compose.__doc__)) + >>> strip_and_dedent = compose(str.strip, textwrap.dedent) + >>> strip_and_dedent(compose.__doc__) == expected + True + + Compose also allows the innermost function to take arbitrary arguments. 
+ + >>> round_three = lambda x: round(x, ndigits=3) + >>> f = compose(round_three, int.__truediv__) + >>> [f(3*x, x+1) for x in range(1,10)] + [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7] + """ + + def compose_two(f1, f2): + return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + return functools.reduce(compose_two, funcs) + + +def method_caller(method_name, *args, **kwargs): + """ + Return a function that will call a named method on the + target object with optional positional and keyword + arguments. + + >>> lower = method_caller('lower') + >>> lower('MyString') + 'mystring' + """ + + def call_method(target): + func = getattr(target, method_name) + return func(*args, **kwargs) + + return call_method + + +def once(func): + """ + Decorate func so it's only ever called the first time. + + This decorator can ensure that an expensive or non-idempotent function + will not be expensive on subsequent calls and is idempotent. + + >>> add_three = once(lambda a: a+3) + >>> add_three(3) + 6 + >>> add_three(9) + 6 + >>> add_three('12') + 6 + + To reset the stored value, simply clear the property ``saved_result``. + + >>> del add_three.saved_result + >>> add_three(9) + 12 + >>> add_three(8) + 12 + + Or invoke 'reset()' on it. + + >>> add_three.reset() + >>> add_three(-3) + 0 + >>> add_three(0) + 0 + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(wrapper, 'saved_result'): + wrapper.saved_result = func(*args, **kwargs) + return wrapper.saved_result + + wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result') + return wrapper + + +def method_cache( + method: CallableT, + cache_wrapper: Callable[ + [CallableT], CallableT + ] = functools.lru_cache(), # type: ignore[assignment] +) -> CallableT: + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. 
+ """ + + def wrapper(self: object, *args: object, **kwargs: object) -> object: + # it's the first call, replace the method with a cached, bound method + bound_method: CallableT = types.MethodType( # type: ignore[assignment] + method, self + ) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. + wrapper.cache_clear = lambda: None # type: ignore[attr-defined] + + return ( # type: ignore[return-value] + _special_method_cache(method, cache_wrapper) or wrapper + ) + + +def _special_method_cache(method, cache_wrapper): + """ + Because Python treats special methods differently, it's not + possible to use instance attributes to implement the cached + methods. + + Instead, install the wrapper method under a different name + and return a simple proxy to that wrapper. + + https://github.com/jaraco/jaraco.functools/issues/5 + """ + name = method.__name__ + special_names = '__getattr__', '__getitem__' + if name not in special_names: + return + + wrapper_name = '__cached' + name + + def proxy(self, *args, **kwargs): + if wrapper_name not in vars(self): + bound = types.MethodType(method, self) + cache = cache_wrapper(bound) + setattr(self, wrapper_name, cache) + else: + cache = getattr(self, wrapper_name) + return cache(*args, **kwargs) + + return proxy + + +def apply(transform): + """ + Decorate a function with a transform function that is + invoked on results returned from the decorated function. + + >>> @apply(reversed) + ... def get_numbers(start): + ... "doc for get_numbers" + ... return range(start, start+3) + >>> list(get_numbers(4)) + [6, 5, 4] + >>> get_numbers.__doc__ + 'doc for get_numbers' + """ + + def wrap(func): + return functools.wraps(func)(compose(transform, func)) + + return wrap + + +def result_invoke(action): + r""" + Decorate a function with an action function that is + invoked on the results returned from the decorated + function (for its side-effect), then return the original + result. + + >>> @result_invoke(print) + ... def add_two(a, b): + ... return a + b + >>> x = add_two(2, 3) + 5 + >>> x + 5 + """ + + def wrap(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + action(result) + return result + + return wrapper + + return wrap + + +def call_aside(f, *args, **kwargs): + """ + Call a function for its side effect after initialization. + + >>> @call_aside + ... def func(): print("called") + called + >>> func() + called + + Use functools.partial to pass parameters to the initial call + + >>> @functools.partial(call_aside, name='bingo') + ... 
def func(name): print("called with", name) + called with bingo + """ + f(*args, **kwargs) + return f + + +class Throttler: + """ + Rate-limit a function (or other callable) + """ + + def __init__(self, func, max_rate=float('Inf')): + if isinstance(func, Throttler): + func = func.func + self.func = func + self.max_rate = max_rate + self.reset() + + def reset(self): + self.last_called = 0 + + def __call__(self, *args, **kwargs): + self._wait() + return self.func(*args, **kwargs) + + def _wait(self): + "ensure at least 1/max_rate seconds from last call" + elapsed = time.time() - self.last_called + must_wait = 1 / self.max_rate - elapsed + time.sleep(max(0, must_wait)) + self.last_called = time.time() + + def __get__(self, obj, type=None): + return first_invoke(self._wait, functools.partial(self.func, obj)) + + +def first_invoke(func1, func2): + """ + Return a function that when invoked will invoke func1 without + any parameters (for its side-effect) and then invoke func2 + with whatever parameters were passed, returning its result. + """ + + def wrapper(*args, **kwargs): + func1() + return func2(*args, **kwargs) + + return wrapper + + +def retry_call(func, cleanup=lambda: None, retries=0, trap=()): + """ + Given a callable func, trap the indicated exceptions + for up to 'retries' times, invoking cleanup on the + exception. On the final attempt, allow any exceptions + to propagate. + """ + attempts = itertools.count() if retries == float('inf') else range(retries) + for attempt in attempts: + try: + return func() + except trap: + cleanup() + + return func() + + +def retry(*r_args, **r_kwargs): + """ + Decorator wrapper for retry_call. Accepts arguments to retry_call + except func and then returns a decorator for the decorated function. + + Ex: + + >>> @retry(retries=3) + ... def my_func(a, b): + ... "this is my funk" + ... print(a, b) + >>> my_func.__doc__ + 'this is my funk' + """ + + def decorate(func): + @functools.wraps(func) + def wrapper(*f_args, **f_kwargs): + bound = functools.partial(func, *f_args, **f_kwargs) + return retry_call(bound, *r_args, **r_kwargs) + + return wrapper + + return decorate + + +def print_yielded(func): + """ + Convert a generator into a function that prints all yielded elements + + >>> @print_yielded + ... def x(): + ... yield 3; yield None + >>> x() + 3 + None + """ + print_all = functools.partial(map, print) + print_results = compose(more_itertools.consume, print_all, func) + return functools.wraps(func)(print_results) + + +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper + + +def assign_params(func, namespace): + """ + Assign parameters from namespace where func solicits. + + >>> def func(x, y=3): + ... print(x, y) + >>> assigned = assign_params(func, dict(x=2, z=4)) + >>> assigned() + 2 3 + + The usual errors are raised if a function doesn't receive + its required parameters: + + >>> assigned = assign_params(func, dict(y=3, z=4)) + >>> assigned() + Traceback (most recent call last): + TypeError: func() ...argument... + + It even works on methods: + + >>> class Handler: + ... def meth(self, arg): + ... 
print(arg) + >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))() + crystal + """ + sig = inspect.signature(func) + params = sig.parameters.keys() + call_ns = {k: namespace[k] for k in params if k in namespace} + return functools.partial(func, **call_ns) + + +def save_method_args(method): + """ + Wrap a method such that when it is called, the args and kwargs are + saved on the method. + + >>> class MyClass: + ... @save_method_args + ... def method(self, a, b): + ... print(a, b) + >>> my_ob = MyClass() + >>> my_ob.method(1, 2) + 1 2 + >>> my_ob._saved_method.args + (1, 2) + >>> my_ob._saved_method.kwargs + {} + >>> my_ob.method(a=3, b='foo') + 3 foo + >>> my_ob._saved_method.args + () + >>> my_ob._saved_method.kwargs == dict(a=3, b='foo') + True + + The arguments are stored on the instance, allowing for + different instance to save different args. + + >>> your_ob = MyClass() + >>> your_ob.method({str('x'): 3}, b=[4]) + {'x': 3} [4] + >>> your_ob._saved_method.args + ({'x': 3},) + >>> my_ob._saved_method.args + () + """ + args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs') + + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + attr_name = '_saved_' + method.__name__ + attr = args_and_kwargs(args, kwargs) + setattr(self, attr_name, attr) + return method(self, *args, **kwargs) + + return wrapper + + +def except_(*exceptions, replace=None, use=None): + """ + Replace the indicated exceptions, if raised, with the indicated + literal replacement or evaluated expression (if present). + + >>> safe_int = except_(ValueError)(int) + >>> safe_int('five') + >>> safe_int('5') + 5 + + Specify a literal replacement with ``replace``. + + >>> safe_int_r = except_(ValueError, replace=0)(int) + >>> safe_int_r('five') + 0 + + Provide an expression to ``use`` to pass through particular parameters. + + >>> safe_int_pt = except_(ValueError, use='args[0]')(int) + >>> safe_int_pt('five') + 'five' + + """ + + def decorate(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except exceptions: + try: + return eval(use) + except TypeError: + return replace + + return wrapper + + return decorate diff --git a/setuptools/_vendor/jaraco/text/Lorem ipsum.txt b/setuptools/_vendor/jaraco/text/Lorem ipsum.txt new file mode 100644 index 00000000..986f944b --- /dev/null +++ b/setuptools/_vendor/jaraco/text/Lorem ipsum.txt @@ -0,0 +1,2 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. +Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. 
Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst. diff --git a/setuptools/_vendor/jaraco/text/__init__.py b/setuptools/_vendor/jaraco/text/__init__.py new file mode 100644 index 00000000..a0306d5f --- /dev/null +++ b/setuptools/_vendor/jaraco/text/__init__.py @@ -0,0 +1,599 @@ +import re +import itertools +import textwrap +import functools + +try: + from importlib.resources import files # type: ignore +except ImportError: # pragma: nocover + from setuptools.extern.importlib_resources import files # type: ignore + +from setuptools.extern.jaraco.functools import compose, method_cache +from setuptools.extern.jaraco.context import ExceptionTrap + + +def substitution(old, new): + """ + Return a function that will perform a substitution on a string + """ + return lambda s: s.replace(old, new) + + +def multi_substitution(*substitutions): + """ + Take a sequence of pairs specifying substitutions, and create + a function that performs those substitutions. + + >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') + 'baz' + """ + substitutions = itertools.starmap(substitution, substitutions) + # compose function applies last function first, so reverse the + # substitutions to get the expected order. + substitutions = reversed(tuple(substitutions)) + return compose(*substitutions) + + +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. + + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use ``in_``: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. 
+ @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) + + +# Python 3.8 compatibility +_unicode_trap = ExceptionTrap(UnicodeDecodeError) + + +@_unicode_trap.passes +def is_decodable(value): + r""" + Return True if the supplied value is decodable (using the default + encoding). + + >>> is_decodable(b'\xff') + False + >>> is_decodable(b'\x32') + True + """ + value.decode() + + +def is_binary(value): + r""" + Return True if the value appears to be binary (that is, it's a byte + string and isn't decodable). + + >>> is_binary(b'\xff') + True + >>> is_binary('\xff') + False + """ + return isinstance(value, bytes) and not is_decodable(value) + + +def trim(s): + r""" + Trim something like a docstring to remove the whitespace that + is common due to indentation and formatting. + + >>> trim("\n\tfoo = bar\n\t\tbar = baz\n") + 'foo = bar\n\tbar = baz' + """ + return textwrap.dedent(s).strip() + + +def wrap(s): + """ + Wrap lines of text, retaining existing newlines as + paragraph markers. + + >>> print(wrap(lorem_ipsum)) + Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do + eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad + minim veniam, quis nostrud exercitation ullamco laboris nisi ut + aliquip ex ea commodo consequat. Duis aute irure dolor in + reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla + pariatur. Excepteur sint occaecat cupidatat non proident, sunt in + culpa qui officia deserunt mollit anim id est laborum. + <BLANKLINE> + Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam + varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus + magna felis sollicitudin mauris. Integer in mauris eu nibh euismod + gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis + risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, + eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas + fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla + a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, + neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing + sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque + nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus + quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, + molestie eu, feugiat in, orci. In hac habitasse platea dictumst. + """ + paragraphs = s.splitlines() + wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs) + return '\n\n'.join(wrapped) + + +def unwrap(s): + r""" + Given a multi-line string, return an unwrapped version. + + >>> wrapped = wrap(lorem_ipsum) + >>> wrapped.count('\n') + 20 + >>> unwrapped = unwrap(wrapped) + >>> unwrapped.count('\n') + 1 + >>> print(unwrapped) + Lorem ipsum dolor sit amet, consectetur adipiscing ... + Curabitur pretium tincidunt lacus. Nulla gravida orci ... 
+ + """ + paragraphs = re.split(r'\n\n+', s) + cleaned = (para.replace('\n', ' ') for para in paragraphs) + return '\n'.join(cleaned) + + + + +class Splitter(object): + """object that will split a string with the given arguments for each call + + >>> s = Splitter(',') + >>> s('hello, world, this is your, master calling') + ['hello', ' world', ' this is your', ' master calling'] + """ + + def __init__(self, *args): + self.args = args + + def __call__(self, s): + return s.split(*self.args) + + +def indent(string, prefix=' ' * 4): + """ + >>> indent('foo') + ' foo' + """ + return prefix + string + + +class WordSet(tuple): + """ + Given an identifier, return the words that identifier represents, + whether in camel case, underscore-separated, etc. + + >>> WordSet.parse("camelCase") + ('camel', 'Case') + + >>> WordSet.parse("under_sep") + ('under', 'sep') + + Acronyms should be retained + + >>> WordSet.parse("firstSNL") + ('first', 'SNL') + + >>> WordSet.parse("you_and_I") + ('you', 'and', 'I') + + >>> WordSet.parse("A simple test") + ('A', 'simple', 'test') + + Multiple caps should not interfere with the first cap of another word. + + >>> WordSet.parse("myABCClass") + ('my', 'ABC', 'Class') + + The result is a WordSet, so you can get the form you need. + + >>> WordSet.parse("myABCClass").underscore_separated() + 'my_ABC_Class' + + >>> WordSet.parse('a-command').camel_case() + 'ACommand' + + >>> WordSet.parse('someIdentifier').lowered().space_separated() + 'some identifier' + + Slices of the result should return another WordSet. + + >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated() + 'out_of_context' + + >>> WordSet.from_class_name(WordSet()).lowered().space_separated() + 'word set' + + >>> example = WordSet.parse('figured it out') + >>> example.headless_camel_case() + 'figuredItOut' + >>> example.dash_separated() + 'figured-it-out' + + """ + + _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))') + + def capitalized(self): + return WordSet(word.capitalize() for word in self) + + def lowered(self): + return WordSet(word.lower() for word in self) + + def camel_case(self): + return ''.join(self.capitalized()) + + def headless_camel_case(self): + words = iter(self) + first = next(words).lower() + new_words = itertools.chain((first,), WordSet(words).camel_case()) + return ''.join(new_words) + + def underscore_separated(self): + return '_'.join(self) + + def dash_separated(self): + return '-'.join(self) + + def space_separated(self): + return ' '.join(self) + + def trim_right(self, item): + """ + Remove the item from the end of the set. + + >>> WordSet.parse('foo bar').trim_right('foo') + ('foo', 'bar') + >>> WordSet.parse('foo bar').trim_right('bar') + ('foo',) + >>> WordSet.parse('').trim_right('bar') + () + """ + return self[:-1] if self and self[-1] == item else self + + def trim_left(self, item): + """ + Remove the item from the beginning of the set. 
+ + >>> WordSet.parse('foo bar').trim_left('foo') + ('bar',) + >>> WordSet.parse('foo bar').trim_left('bar') + ('foo', 'bar') + >>> WordSet.parse('').trim_left('bar') + () + """ + return self[1:] if self and self[0] == item else self + + def trim(self, item): + """ + >>> WordSet.parse('foo bar').trim('foo') + ('bar',) + """ + return self.trim_left(item).trim_right(item) + + def __getitem__(self, item): + result = super(WordSet, self).__getitem__(item) + if isinstance(item, slice): + result = WordSet(result) + return result + + @classmethod + def parse(cls, identifier): + matches = cls._pattern.finditer(identifier) + return WordSet(match.group(0) for match in matches) + + @classmethod + def from_class_name(cls, subject): + return cls.parse(subject.__class__.__name__) + + +# for backward compatibility +words = WordSet.parse + + +def simple_html_strip(s): + r""" + Remove HTML from the string `s`. + + >>> str(simple_html_strip('')) + '' + + >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) + A stormy day in paradise + + >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) + Somebody tell the truth. + + >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) + What about + multiple lines? + """ + html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL) + texts = (match.group(3) or '' for match in html_stripper.finditer(s)) + return ''.join(texts) + + +class SeparatedValues(str): + """ + A string separated by a separator. Overrides __iter__ for getting + the values. + + >>> list(SeparatedValues('a,b,c')) + ['a', 'b', 'c'] + + Whitespace is stripped and empty values are discarded. + + >>> list(SeparatedValues(' a, b , c, ')) + ['a', 'b', 'c'] + """ + + separator = ',' + + def __iter__(self): + parts = self.split(self.separator) + return filter(None, (part.strip() for part in parts)) + + +class Stripper: + r""" + Given a series of lines, find the common prefix and strip it from them. + + >>> lines = [ + ... 'abcdefg\n', + ... 'abc\n', + ... 'abcde\n', + ... ] + >>> res = Stripper.strip_prefix(lines) + >>> res.prefix + 'abc' + >>> list(res.lines) + ['defg\n', '\n', 'de\n'] + + If no prefix is common, nothing should be stripped. + + >>> lines = [ + ... 'abcd\n', + ... '1234\n', + ... ] + >>> res = Stripper.strip_prefix(lines) + >>> res.prefix = '' + >>> list(res.lines) + ['abcd\n', '1234\n'] + """ + + def __init__(self, prefix, lines): + self.prefix = prefix + self.lines = map(self, lines) + + @classmethod + def strip_prefix(cls, lines): + prefix_lines, lines = itertools.tee(lines) + prefix = functools.reduce(cls.common_prefix, prefix_lines) + return cls(prefix, lines) + + def __call__(self, line): + if not self.prefix: + return line + null, prefix, rest = line.partition(self.prefix) + return rest + + @staticmethod + def common_prefix(s1, s2): + """ + Return the common prefix of two lines. + """ + index = min(len(s1), len(s2)) + while s1[:index] != s2[:index]: + index -= 1 + return s1[:index] + + +def remove_prefix(text, prefix): + """ + Remove the prefix from the text if it exists. + + >>> remove_prefix('underwhelming performance', 'underwhelming ') + 'performance' + + >>> remove_prefix('something special', 'sample') + 'something special' + """ + null, prefix, rest = text.rpartition(prefix) + return rest + + +def remove_suffix(text, suffix): + """ + Remove the suffix from the text if it exists. 
+ + >>> remove_suffix('name.git', '.git') + 'name' + + >>> remove_suffix('something special', 'sample') + 'something special' + """ + rest, suffix, null = text.partition(suffix) + return rest + + +def normalize_newlines(text): + r""" + Replace alternate newlines with the canonical newline. + + >>> normalize_newlines('Lorem Ipsum\u2029') + 'Lorem Ipsum\n' + >>> normalize_newlines('Lorem Ipsum\r\n') + 'Lorem Ipsum\n' + >>> normalize_newlines('Lorem Ipsum\x85') + 'Lorem Ipsum\n' + """ + newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029'] + pattern = '|'.join(newlines) + return re.sub(pattern, '\n', text) + + +def _nonblank(str): + return str and not str.startswith('#') + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. + + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(' #')[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. + + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) + ['foobarbaz'] + + Not sure why, but... + The character preceeding the backslash is also elided. + + >>> list(join_continuation(['goo\\', 'dly'])) + ['godly'] + + A terrible idea, but... + If no line is available to continue, suppress the lines. + + >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) + ['foo'] + """ + lines = iter(lines) + for item in lines: + while item.endswith('\\'): + try: + item = item[:-2].strip() + next(lines) + except StopIteration: + return + yield item diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER b/setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE b/setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE new file mode 100644 index 00000000..0a523bec --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012 Erik Rose + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA b/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA new file mode 100644 index 00000000..bdaee655 --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA @@ -0,0 +1,462 @@ +Metadata-Version: 2.1 +Name: more-itertools +Version: 8.8.0 +Summary: More routines for operating on iterables, beyond itertools +Home-page: https://github.com/more-itertools/more-itertools +Author: Erik Rose +Author-email: erikrose@grinchcentral.com +License: MIT +Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst + +============== +More Itertools +============== + +.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest + :target: https://more-itertools.readthedocs.io/en/stable/ + +Python's ``itertools`` library is a gem - you can compose elegant solutions +for a variety of problems with the functions it provides. In ``more-itertools`` +we collect additional building blocks, recipes, and routines for working with +Python iterables. 
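Before the capability table that follows, a tiny hedged taste of the vendored library, using only functions that appear in that table; plain more_itertools stands in for setuptools.extern.more_itertools, the name setuptools-internal code would use:

    # Minimal sketch only; not part of the vendored METADATA itself.
    import more_itertools as mi

    print(list(mi.chunked(range(7), 3)))        # [[0, 1, 2], [3, 4, 5], [6]]
    print(list(mi.flatten([[1, 2], [3], []])))  # [1, 2, 3]
    print(mi.ilen(x for x in range(100) if x % 7 == 0))  # 15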
+ ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, | +| | `ichunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ichunked>`_, | +| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, | +| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, | +| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, | +| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, | +| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, | +| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, | +| | `split_into <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_into>`_, | +| | `split_when <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_when>`_, | +| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, | +| | `unzip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unzip>`_, | +| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, | +| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, | +| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, | +| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, | +| | `substrings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings>`_, | +| | `substrings_indexes <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings_indexes>`_, | +| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, | +| | `windowed_complete <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed_complete>`_, | +| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, | +| | 
`intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, | +| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, | +| | `mark_ends <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.mark_ends>`_, | +| | `repeat_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeat_last>`_, | +| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, | +| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, | +| | `padnone <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padnone>`_, | +| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, | +| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, | +| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, | +| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, | +| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, | +| | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, | +| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, | +| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, | +| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, | +| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, | +| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, | +| | `value_chain <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.value_chain>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, | +| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, | +| | `sample <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sample>`_, | +| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, | +| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, | +| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, | +| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, | +| | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, | +| | `all_equal 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, | +| | `all_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_unique>`_, | +| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, | +| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, | +| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, | +| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, | +| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, | +| | `only <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.only>`_, | +| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, | +| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, | +| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, | +| | `filter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.filter_except>`_ | +| | `map_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_except>`_ | +| | `nth_or_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_or_last>`_, | +| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, | +| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, | +| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, | +| | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertoo ls.unique_everseen>`_, | +| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, | +| | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, | +| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, | +| | `partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partitions>`_, | +| | `set_partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.set_partitions>`_, | +| | `product_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.product_index>`_, | +| | `combination_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.combination_index>`_, | +| | `permutation_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.permutation_index>`_, | +| | `powerset 
<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, | +| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, | +| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, | +| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, | +| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, | +| | `nth_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_product>`_ | +| | `nth_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_permutation>`_ | +| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, | +| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, | +| | `countable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.countable>`_, | +| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, | +| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, | +| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ | ++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| Others | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, | +| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, | +| | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, | +| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, | +| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, | +| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, | +| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, | +| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, | +| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, | +| | `time_limited <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.time_limited>`_, | +| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, | +| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, | +| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ | 
++------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + + +Getting started +=============== + +To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_: + +.. code-block:: shell + + pip install more-itertools + +The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_ +are included in the top-level package: + +.. code-block:: python + + >>> from more_itertools import flatten + >>> iterable = [(0, 1), (2, 3)] + >>> list(flatten(iterable)) + [0, 1, 2, 3] + +Several new recipes are available as well: + +.. code-block:: python + + >>> from more_itertools import chunked + >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8] + >>> list(chunked(iterable, 3)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8]] + + >>> from more_itertools import spy + >>> iterable = (x * x for x in range(1, 6)) + >>> head, iterable = spy(iterable, n=3) + >>> list(head) + [1, 4, 9] + >>> list(iterable) + [1, 4, 9, 16, 25] + + + +For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/stable/api.html>`_. + + +Links elsewhere +=============== + +Blog posts about ``more-itertools``: + +* `Yo, I heard you like decorators <https://www.bbayles.com/index/decorator_factory>`__ +* `Tour of Python Itertools <https://martinheinz.dev/blog/16>`__ (`Alternate <https://dev.to/martinheinz/tour-of-python-itertools-4122>`__) + + +Development +=========== + +``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_ +and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/more-itertools/more-itertools/graphs/contributors>`_. +If you have a problem or suggestion, please file a bug or pull request in this +repository. Thanks for contributing! + + +Version History +=============== + + + :noindex: + +8.8.0 +----- + +* New functions + * countable (thanks to krzysieq) + +* Changes to existing functions + * split_before was updated to handle empy collections (thanks to TiunovNN) + * unique_everseen got a performance boost (thanks to Numerlor) + * The type hint for value_chain was corrected (thanks to vr2262) + +8.7.0 +----- + +* New functions + * convolve (from the Python itertools docs) + * product_index, combination_index, and permutation_index (thanks to N8Brooks) + * value_chain (thanks to jenstroeger) + +* Changes to existing functions + * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand) + * pad_none is now the preferred name for padnone, though the latter remains available. 
+ * pairwise will now use the Python standard library implementation on Python 3.10+ + * sort_together now accepts a ``key`` argument (thanks to brianmaissy) + * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis) + * time_limited can now indicate whether its iterator has expired (thanks to roysmith) + * The implementation of unique_everseen was improved (thanks to plammens) + +* Other changes: + * Various documentation updates (thanks to cthoyt, Evantm, and cyphase) + +8.6.0 +----- + +* New itertools + * all_unique (thanks to brianmaissy) + * nth_product and nth_permutation (thanks to N8Brooks) + +* Changes to existing itertools + * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool) + +* Other changes + * Python 3.5 has reached its end of life and is no longer supported. + * Python 3.9 is officially supported. + * Various documentation fixes (thanks to timgates42) + +8.5.0 +----- + +* New itertools + * windowed_complete (thanks to MarcinKonowalczyk) + +* Changes to existing itertools: + * The is_sorted implementation was improved (thanks to cool-RR) + * The groupby_transform now accepts a ``reducefunc`` parameter. + * The last implementation was improved (thanks to brianmaissy) + +* Other changes + * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0) + * The tests for distinct_combinations were improved (thanks to Minabsapi) + * Automated tests now run on GitHub Actions. All commits now check: + * That unit tests pass + * That the examples in docstrings work + * That test coverage remains high (using `coverage`) + * For linting errors (using `flake8`) + * For consistent style (using `black`) + * That the type stubs work (using `mypy`) + * That the docs build correctly (using `sphinx`) + * That packages build correctly (using `twine`) + +8.4.0 +----- + +* New itertools + * mark_ends (thanks to kalekundert) + * is_sorted + +* Changes to existing itertools: + * islice_extended can now be used with real slices (thanks to cool-RR) + * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky) + +* Other changes + * Automated tests now enforce code style (using `black <https://github.com/psf/black>`__) + * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf) + * The test configuration for mypy was updated (thanks to blueyed) + + +8.3.0 +----- + +* New itertools + * zip_equal (thanks to frankier and alexmojaki) + +* Changes to existing itertools: + * split_at, split_before, split_after, and split_when all got a ``maxsplit`` paramter (thanks to jferard and ilai-deutel) + * split_at now accepts a ``keep_separator`` parameter (thanks to jferard) + * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel) + * The windowed implementation was improved (thanks to SergBobrovsky) + * The spy implementation was improved (thanks to has2k1) + +* Other changes + * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel) + * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne) + +8.2.0 +----- + +* Bug fixes + * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel) + +* Changes to existing itertools: + * numeric_range now behaves more like the built-in range. (thanks to jferard) + * bucket now allows for enumerating keys. 
(thanks to alexchandel) + * sliced now should now work for numpy arrays. (thanks to sswingle) + * seekable now has a ``maxlen`` parameter. + +8.1.0 +----- + +* Bug fixes + * partition works with ``pred=None`` again. (thanks to MSeifert04) + +* New itertools + * sample (thanks to tommyod) + * nth_or_last (thanks to d-ryzhikov) + +* Changes to existing itertools: + * The implementation for divide was improved. (thanks to jferard) + +8.0.2 +----- + +* Bug fixes + * The type stub files are now part of the wheel distribution (thanks to keisheiled) + +8.0.1 +----- + +* Bug fixes + * The type stub files now work for functions imported from the + root package (thanks to keisheiled) + +8.0.0 +----- + +* New itertools and other additions + * This library now ships type hints for use with mypy. + (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance) + * split_when (thanks to jferard) + * repeat_last (thanks to d-ryzhikov) + +* Changes to existing itertools: + * The implementation for set_partitions was improved. (thanks to jferard) + * partition was optimized for expensive predicates. (thanks to stevecj) + * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky) + * The implementation for difference was improved. (thanks to Jabbey92) + +* Other changes + * Python 3.4 has reached its end of life and is no longer supported. + * Python 3.8 is officially supported. (thanks to jdufresne) + * The ``collate`` function has been deprecated. + It raises a ``DeprecationWarning`` if used, and will be removed in a future release. + * one and only now provide more informative error messages. (thanks to gabbard) + * Unit tests were moved outside of the main package (thanks to jdufresne) + * Various documentation fixes (thanks to kriomant, gabbard, jdufresne) + + +7.2.0 +----- + +* New itertools + * distinct_combinations + * set_partitions (thanks to kbarrett) + * filter_except + * map_except + +7.1.0 +----- + +* New itertools + * ichunked (thanks davebelais and youtux) + * only (thanks jaraco) + +* Changes to existing itertools: + * numeric_range now supports ranges specified by + ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests). + * difference now supports an *initial* keyword argument. + + +* Other changes + * Various documentation fixes (thanks raimon49, pylang) + +7.0.0 +----- + +* New itertools: + * time_limited + * partitions (thanks to rominf and Saluev) + * substrings_indexes (thanks to rominf) + +* Changes to existing itertools: + * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet) + +The major version update is due to the change in the default behavior of +collapse. It now treats ``bytes`` objects the same as ``str`` objects. +This aligns its behavior with always_iterable. + +.. code-block:: python + + >>> from more_itertools import collapse + >>> iterable = [[1, 2], b'345', [6]] + >>> print(list(collapse(iterable))) + [1, 2, b'345', 6] + +6.0.0 +----- + +* Major changes: + * Python 2.7 is no longer supported. The 5.0.0 release will be the last + version targeting Python 2.7. + * All future releases will target the active versions of Python 3. + As of 2019, those are Python 3.4 and above. + * The ``six`` library is no longer a dependency. + * The accumulate function is no longer part of this library. You + may import a better version from the standard ``itertools`` module. 
+ +* Changes to existing itertools: + * The order of the parameters in grouper have changed to match + the latest recipe in the itertools documentation. Use of the old order + will be supported in this release, but emit a ``DeprecationWarning``. + The legacy behavior will be dropped in a future release. (thanks to jaraco) + * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values <https://stackoverflow.com/questions/6284396/permutations-with-unique-values>`_ at StackOverflow.) + * An unused parameter was removed from substrings. (thanks to pylang) + +* Other changes: + * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04) + * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk) + + diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD new file mode 100644 index 00000000..36ffbd86 --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD @@ -0,0 +1,17 @@ +more_itertools-8.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-8.8.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-8.8.0.dist-info/METADATA,sha256=Gke9w7RnfiAvveik_iBBrzd0RjrDhsQ8uRYNBJdo4qQ,40482
+more_itertools-8.8.0.dist-info/RECORD,,
+more_itertools-8.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools-8.8.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+more_itertools-8.8.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15
+more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-310.pyc,,
+more_itertools/__pycache__/more.cpython-310.pyc,,
+more_itertools/__pycache__/recipes.cpython-310.pyc,,
+more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968
+more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256
+more_itertools/recipes.pyi,sha256=9BpeKd5_qalYVSnuHfqPSCfoGgqnQY2Xu9pNwrDlHU8,3551
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED b/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL b/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt new file mode 100644 index 00000000..a5035bef --- /dev/null +++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +more_itertools diff --git a/setuptools/_vendor/more_itertools/more.py b/setuptools/_vendor/more_itertools/more.py index 0f7d282a..e6fca4d4 100644 --- a/setuptools/_vendor/more_itertools/more.py +++ b/setuptools/_vendor/more_itertools/more.py @@ -2,7 +2,6 @@ import warnings from collections import Counter, defaultdict, deque, abc from collections.abc import Sequence -from concurrent.futures import ThreadPoolExecutor from functools import partial, reduce, wraps from heapq import merge, heapify, heapreplace, heappop from itertools import ( @@ -3454,7 +3453,7 @@ class callback_iter: self._aborted = False self._future = None self._wait_seconds = wait_seconds - self._executor = ThreadPoolExecutor(max_workers=1) + self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1) self._iterator = self._reader() def __enter__(self): diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER b/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE b/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA b/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA new file mode 100644 index 00000000..aadc3749 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA @@ -0,0 +1,57 @@ +Metadata-Version: 2.1 +Name: nspektr +Version: 0.3.0 +Summary: package inspector +Home-page: https://github.com/jaraco/nspektr +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: jaraco.context +Requires-Dist: jaraco.functools +Requires-Dist: more-itertools +Requires-Dist: packaging +Requires-Dist: importlib-metadata (>=3.6) ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/nspektr.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/nspektr.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/nspektr + +.. image:: https://github.com/jaraco/nspektr/workflows/tests/badge.svg + :target: https://github.com/jaraco/nspektr/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD b/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD new file mode 100644 index 00000000..5e5de5eb --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD @@ -0,0 +1,11 @@ +nspektr-0.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+nspektr-0.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+nspektr-0.3.0.dist-info/METADATA,sha256=X0stV4vwFBDBxvzhBl4kAHVdGWPIjEitqAuTJItcQH0,2162
+nspektr-0.3.0.dist-info/RECORD,,
+nspektr-0.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nspektr-0.3.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+nspektr-0.3.0.dist-info/top_level.txt,sha256=uEA20Ixo04XS3wOIt5-Jk5ZuMkBrtlleFipRr8Y1SjQ,8
+nspektr/__init__.py,sha256=d6-d-ZlGAQQP-MEi_NZMiyn2vLbq8Hw3HxICgm3X0Q8,3949
+nspektr/__pycache__/__init__.cpython-310.pyc,,
+nspektr/__pycache__/_compat.cpython-310.pyc,,
+nspektr/_compat.py,sha256=2QoozYhuhgow_NMUATmhoM-yppBV3jiZYQgdiP-ww0s,582
diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED b/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL b/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt new file mode 100644 index 00000000..b10ef50a --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +nspektr diff --git a/setuptools/_vendor/nspektr/__init__.py b/setuptools/_vendor/nspektr/__init__.py new file mode 100644 index 00000000..938bbdb9 --- /dev/null +++ b/setuptools/_vendor/nspektr/__init__.py @@ -0,0 +1,145 @@ +import itertools +import functools +import contextlib + +from setuptools.extern.packaging.requirements import Requirement +from setuptools.extern.packaging.version import Version +from setuptools.extern.more_itertools import always_iterable +from setuptools.extern.jaraco.context import suppress +from setuptools.extern.jaraco.functools import apply + +from ._compat import metadata, repair_extras + + +def resolve(req: Requirement) -> metadata.Distribution: + """ + Resolve the requirement to its distribution. + + Ignore exception detail for Python 3.9 compatibility. + + >>> resolve(Requirement('pytest<3')) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + importlib.metadata.PackageNotFoundError: No package metadata was found for pytest<3 + """ + dist = metadata.distribution(req.name) + if not req.specifier.contains(Version(dist.version), prereleases=True): + raise metadata.PackageNotFoundError(str(req)) + dist.extras = req.extras # type: ignore + return dist + + +@apply(bool) +@suppress(metadata.PackageNotFoundError) +def is_satisfied(req: Requirement): + return resolve(req) + + +unsatisfied = functools.partial(itertools.filterfalse, is_satisfied) + + +class NullMarker: + @classmethod + def wrap(cls, req: Requirement): + return req.marker or cls() + + def evaluate(self, *args, **kwargs): + return True + + +def find_direct_dependencies(dist, extras=None): + """ + Find direct, declared dependencies for dist. + """ + simple = ( + req + for req in map(Requirement, always_iterable(dist.requires)) + if NullMarker.wrap(req).evaluate(dict(extra=None)) + ) + extra_deps = ( + req + for req in map(Requirement, always_iterable(dist.requires)) + for extra in always_iterable(getattr(dist, 'extras', extras)) + if NullMarker.wrap(req).evaluate(dict(extra=extra)) + ) + return itertools.chain(simple, extra_deps) + + +def traverse(items, visit): + """ + Given an iterable of items, traverse the items. + + For each item, visit is called to return any additional items + to include in the traversal. + """ + while True: + try: + item = next(items) + except StopIteration: + return + yield item + items = itertools.chain(items, visit(item)) + + +def find_req_dependencies(req): + with contextlib.suppress(metadata.PackageNotFoundError): + dist = resolve(req) + yield from find_direct_dependencies(dist) + + +def find_dependencies(dist, extras=None): + """ + Find all reachable dependencies for dist. 
+ + dist is an importlib.metadata.Distribution (or similar). + TODO: create a suitable protocol for type hint. + + >>> deps = find_dependencies(resolve(Requirement('nspektr'))) + >>> all(isinstance(dep, Requirement) for dep in deps) + True + >>> not any('pytest' in str(dep) for dep in deps) + True + >>> test_deps = find_dependencies(resolve(Requirement('nspektr[testing]'))) + >>> any('pytest' in str(dep) for dep in test_deps) + True + """ + + def visit(req, seen=set()): + if req in seen: + return () + seen.add(req) + return find_req_dependencies(req) + + return traverse(find_direct_dependencies(dist, extras), visit) + + +class Unresolved(Exception): + def __iter__(self): + return iter(self.args[0]) + + +def missing(ep): + """ + Generate the unresolved dependencies (if any) of ep. + """ + return unsatisfied(find_dependencies(ep.dist, repair_extras(ep.extras))) + + +def check(ep): + """ + >>> ep, = metadata.entry_points(group='console_scripts', name='pip') + >>> check(ep) + >>> dist = metadata.distribution('nspektr') + + Since 'docs' extras are not installed, requesting them should fail. + + >>> ep = metadata.EntryPoint( + ... group=None, name=None, value='nspektr [docs]')._for(dist) + >>> check(ep) + Traceback (most recent call last): + ... + nspektr.Unresolved: [...] + """ + missed = list(missing(ep)) + if missed: + raise Unresolved(missed) diff --git a/setuptools/_vendor/nspektr/_compat.py b/setuptools/_vendor/nspektr/_compat.py new file mode 100644 index 00000000..3278379a --- /dev/null +++ b/setuptools/_vendor/nspektr/_compat.py @@ -0,0 +1,21 @@ +import contextlib +import sys + + +if sys.version_info >= (3, 10): + import importlib.metadata as metadata +else: + import setuptools.extern.importlib_metadata as metadata # type: ignore # noqa: F401 + + +def repair_extras(extras): + """ + Repair extras that appear as match objects. + + python/importlib_metadata#369 revealed a flaw in the EntryPoint + implementation. This function wraps the extras to ensure + they are proper strings even on older implementations. + """ + with contextlib.suppress(AttributeError): + return list(item.group(0) for item in extras) + return extras diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/INSTALLER b/setuptools/_vendor/ordered_set-3.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA new file mode 100644 index 00000000..db6e12f2 --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA @@ -0,0 +1,157 @@ +Metadata-Version: 2.1 +Name: ordered-set +Version: 3.1.1 +Summary: A MutableSet that remembers its order, so that every entry has an index. 
+Home-page: https://github.com/LuminosoInsight/ordered-set +Maintainer: Robyn Speer +Maintainer-email: rspeer@luminoso.com +License: MIT-LICENSE +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.7 +Description-Content-Type: text/markdown +License-File: MIT-LICENSE + +[](https://travis-ci.org/LuminosoInsight/ordered-set) +[](https://codecov.io/github/LuminosoInsight/ordered-set?branch=master) +[](https://pypi.python.org/pypi/ordered-set) + +An OrderedSet is a mutable data structure that is a hybrid of a list and a set. +It remembers the order of its entries, and every entry has an index number that +can be looked up. + + +## Usage examples + +An OrderedSet is created and used like a set: + + >>> from ordered_set import OrderedSet + + >>> letters = OrderedSet('abracadabra') + + >>> letters + OrderedSet(['a', 'b', 'r', 'c', 'd']) + + >>> 'r' in letters + True + +It is efficient to find the index of an entry in an OrderedSet, or find an +entry by its index. To help with this use case, the `.add()` method returns +the index of the added item, whether it was already in the set or not. + + >>> letters.index('r') + 2 + + >>> letters[2] + 'r' + + >>> letters.add('r') + 2 + + >>> letters.add('x') + 5 + +OrderedSets implement the union (`|`), intersection (`&`), and difference (`-`) +operators like sets do. + + >>> letters |= OrderedSet('shazam') + + >>> letters + OrderedSet(['a', 'b', 'r', 'c', 'd', 'x', 's', 'h', 'z', 'm']) + + >>> letters & set('aeiou') + OrderedSet(['a']) + + >>> letters -= 'abcd' + + >>> letters + OrderedSet(['r', 'x', 's', 'h', 'z', 'm']) + +The `__getitem__()` and `index()` methods have been extended to accept any +iterable except a string, returning a list, to perform NumPy-like "fancy +indexing". + + >>> letters = OrderedSet('abracadabra') + + >>> letters[[0, 2, 3]] + ['a', 'r', 'c'] + + >>> letters.index(['a', 'r', 'c']) + [0, 2, 3] + +OrderedSet implements `__getstate__` and `__setstate__` so it can be pickled, +and implements the abstract base classes `collections.MutableSet` and +`collections.Sequence`. + + +## Interoperability with NumPy and Pandas + +An OrderedSet can be used as a bi-directional mapping between a sparse +vocabulary and dense index numbers. As of version 3.1, it accepts NumPy arrays +of index numbers as well as lists. + +This combination of features makes OrderedSet a simple implementation of many +of the things that `pandas.Index` is used for, and many of its operations are +faster than the equivalent pandas operations. + +For further compatibility with pandas.Index, `get_loc` (the pandas method for +looking up a single index) and `get_indexer` (the pandas method for fancy +indexing in reverse) are both aliases for `index` (which handles both cases +in OrderedSet). 
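As an illustrative aside (not part of the vendored file), a minimal sketch of the vocabulary-to-index round trip described above, using only calls documented in this README; the get_loc/get_indexer lines rely on the README's statement that both are aliases for index():

    >>> from ordered_set import OrderedSet

    >>> vocab = OrderedSet(['the', 'quick', 'brown', 'fox', 'the'])

    >>> vocab.index('brown')
    2

    >>> vocab.get_loc('fox')
    3

    >>> vocab.get_indexer(['fox', 'the'])
    [3, 0]

Here 'the', 'quick', 'brown', 'fox' receive dense indices 0 through 3 and the duplicate 'the' collapses into index 0, which is what makes the structure usable as a bi-directional token/index mapping.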
+ + +## Type hinting +To use type hinting features install `ordered-set-stubs` package from +[PyPI](https://pypi.org/project/ordered-set-stubs/): + + $ pip install ordered-set-stubs + + +## Authors + +OrderedSet was implemented by Robyn Speer. Jon Crall contributed changes and +tests to make it fit the Python set API. + + +## Comparisons + +The original implementation of OrderedSet was a [recipe posted to ActiveState +Recipes][recipe] by Raymond Hettiger, released under the MIT license. + +[recipe]: https://code.activestate.com/recipes/576694-orderedset/ + +Hettiger's implementation kept its content in a doubly-linked list referenced by a +dict. As a result, looking up an item by its index was an O(N) operation, while +deletion was O(1). + +This version makes different trade-offs for the sake of efficient lookups. Its +content is a standard Python list instead of a doubly-linked list. This +provides O(1) lookups by index at the expense of O(N) deletion, as well as +slightly faster iteration. + +In Python 3.6 and later, the built-in `dict` type is inherently ordered. If you +ignore the dictionary values, that also gives you a simple ordered set, with +fast O(1) insertion, deletion, iteration and membership testing. However, `dict` +does not provide the list-like random access features of OrderedSet. You +would have to convert it to a list in O(N) to look up the index of an entry or +look up an entry by its index. + + +## Compatibility + +OrderedSet is automatically tested on Python 2.7, 3.4, 3.5, 3.6, and 3.7. +We've checked more informally that it works on PyPy and PyPy3. + + diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE b/setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE new file mode 100644 index 00000000..25117ef4 --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2018 Luminoso Technologies, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD new file mode 100644 index 00000000..89579a07 --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/ordered_set.cpython-310.pyc,,
+ordered_set-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ordered_set-3.1.1.dist-info/METADATA,sha256=uGvfFaNmhcl69lGdHmyOXc30N3U6Jn8DByfh_VHEPpw,5359
+ordered_set-3.1.1.dist-info/MIT-LICENSE,sha256=TvRE7qUSUBcd0ols7wgNf3zDEEJWW7kv7WDRySrMBBE,1071
+ordered_set-3.1.1.dist-info/RECORD,,
+ordered_set-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ordered_set-3.1.1.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
+ordered_set-3.1.1.dist-info/top_level.txt,sha256=NTY2_aDi1Do9fl3Z9EmWPxasFkUeW2dzO2D3RDx5CfM,12
+ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/REQUESTED b/setuptools/_vendor/ordered_set-3.1.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL new file mode 100644 index 00000000..0b18a281 --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt b/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..1c191eef --- /dev/null +++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +ordered_set diff --git a/setuptools/_vendor/packaging-21.3.dist-info/INSTALLER b/setuptools/_vendor/packaging-21.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE new file mode 100644 index 00000000..6f62d44e --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.APACHE b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.APACHE new file mode 100644 index 00000000..f433b1a5 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.BSD b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.BSD new file mode 100644 index 00000000..42ce7b75 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/setuptools/_vendor/packaging-21.3.dist-info/METADATA b/setuptools/_vendor/packaging-21.3.dist-info/METADATA new file mode 100644 index 00000000..358ace53 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/METADATA @@ -0,0 +1,453 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 21.3 +Summary: Core utilities for Python packages +Home-page: https://github.com/pypa/packaging +Author: Donald Stufft and individual contributors +Author-email: donald@stufft.io +License: BSD-2-Clause or Apache-2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Requires-Dist: pyparsing (!=3.0.5,>=2.0.2) + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications <https://packaging.python.org/specifications/>`_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + +Changelog +--------- + +21.3 - 2021-11-17 +~~~~~~~~~~~~~~~~~ + +* Add a ``pp3-none-any`` tag (`#311 <https://github.com/pypa/packaging/issues/311>`__) +* Replace the blank pyparsing 3 exclusion with a 3.0.5 exclusion (`#481 <https://github.com/pypa/packaging/issues/481>`__, `#486 <https://github.com/pypa/packaging/issues/486>`__) +* Fix a spelling mistake (`#479 <https://github.com/pypa/packaging/issues/479>`__) + +21.2 - 2021-10-29 +~~~~~~~~~~~~~~~~~ + +* Update documentation entry for 21.1. + +21.1 - 2021-10-29 +~~~~~~~~~~~~~~~~~ + +* Update pin to pyparsing to exclude 3.0.0. + +21.0 - 2021-07-03 +~~~~~~~~~~~~~~~~~ + +* PEP 656: musllinux support (`#411 <https://github.com/pypa/packaging/issues/411>`__) +* Drop support for Python 2.7, Python 3.4 and Python 3.5. +* Replace distutils usage with sysconfig (`#396 <https://github.com/pypa/packaging/issues/396>`__) +* Add support for zip files in ``parse_sdist_filename`` (`#429 <https://github.com/pypa/packaging/issues/429>`__) +* Use cached ``_hash`` attribute to short-circuit tag equality comparisons (`#417 <https://github.com/pypa/packaging/issues/417>`__) +* Specify the default value for the ``specifier`` argument to ``SpecifierSet`` (`#437 <https://github.com/pypa/packaging/issues/437>`__) +* Proper keyword-only "warn" argument in packaging.tags (`#403 <https://github.com/pypa/packaging/issues/403>`__) +* Correctly remove prerelease suffixes from ~= check (`#366 <https://github.com/pypa/packaging/issues/366>`__) +* Fix type hints for ``Version.post`` and ``Version.dev`` (`#393 <https://github.com/pypa/packaging/issues/393>`__) +* Use typing alias ``UnparsedVersion`` (`#398 <https://github.com/pypa/packaging/issues/398>`__) +* Improve type inference for ``packaging.specifiers.filter()`` (`#430 <https://github.com/pypa/packaging/issues/430>`__) +* Tighten the return type of ``canonicalize_version()`` (`#402 <https://github.com/pypa/packaging/issues/402>`__) + +20.9 - 2021-01-29 +~~~~~~~~~~~~~~~~~ + +* Run `isort <https://pypi.org/project/isort/>`_ over the code base (`#377 <https://github.com/pypa/packaging/issues/377>`__) +* Add support for the ``macosx_10_*_universal2`` platform tags (`#379 <https://github.com/pypa/packaging/issues/379>`__) +* Introduce ``packaging.utils.parse_wheel_filename()`` and ``parse_sdist_filename()`` + (`#387 <https://github.com/pypa/packaging/issues/387>`__ and `#389 <https://github.com/pypa/packaging/issues/389>`__) + +20.8 - 2020-12-11 +~~~~~~~~~~~~~~~~~ + +* Revert back to setuptools for compatibility purposes for some Linux distros (`#363 <https://github.com/pypa/packaging/issues/363>`__) +* Do not insert an underscore in wheel tags when the interpreter version number + is more than 2 digits (`#372 <https://github.com/pypa/packaging/issues/372>`__) + +20.7 - 2020-11-28 +~~~~~~~~~~~~~~~~~ + +No unreleased changes. + +20.6 - 2020-11-28 +~~~~~~~~~~~~~~~~~ + +.. note:: This release was subsequently yanked, and these changes were included in 20.7. 
+ +* Fix flit configuration, to include LICENSE files (`#357 <https://github.com/pypa/packaging/issues/357>`__) +* Make `intel` a recognized CPU architecture for the `universal` macOS platform tag (`#361 <https://github.com/pypa/packaging/issues/361>`__) +* Add some missing type hints to `packaging.requirements` (issue:`350`) + +20.5 - 2020-11-27 +~~~~~~~~~~~~~~~~~ + +* Officially support Python 3.9 (`#343 <https://github.com/pypa/packaging/issues/343>`__) +* Deprecate the ``LegacyVersion`` and ``LegacySpecifier`` classes (`#321 <https://github.com/pypa/packaging/issues/321>`__) +* Handle ``OSError`` on non-dynamic executables when attempting to resolve + the glibc version string. + +20.4 - 2020-05-19 +~~~~~~~~~~~~~~~~~ + +* Canonicalize version before comparing specifiers. (`#282 <https://github.com/pypa/packaging/issues/282>`__) +* Change type hint for ``canonicalize_name`` to return + ``packaging.utils.NormalizedName``. + This enables the use of static typing tools (like mypy) to detect mixing of + normalized and un-normalized names. + +20.3 - 2020-03-05 +~~~~~~~~~~~~~~~~~ + +* Fix changelog for 20.2. + +20.2 - 2020-03-05 +~~~~~~~~~~~~~~~~~ + +* Fix a bug that caused a 32-bit OS that runs on a 64-bit ARM CPU (e.g. ARM-v8, + aarch64), to report the wrong bitness. + +20.1 - 2020-01-24 +~~~~~~~~~~~~~~~~~~~ + +* Fix a bug caused by reuse of an exhausted iterator. (`#257 <https://github.com/pypa/packaging/issues/257>`__) + +20.0 - 2020-01-06 +~~~~~~~~~~~~~~~~~ + +* Add type hints (`#191 <https://github.com/pypa/packaging/issues/191>`__) + +* Add proper trove classifiers for PyPy support (`#198 <https://github.com/pypa/packaging/issues/198>`__) + +* Scale back depending on ``ctypes`` for manylinux support detection (`#171 <https://github.com/pypa/packaging/issues/171>`__) + +* Use ``sys.implementation.name`` where appropriate for ``packaging.tags`` (`#193 <https://github.com/pypa/packaging/issues/193>`__) + +* Expand upon the API provided by ``packaging.tags``: ``interpreter_name()``, ``mac_platforms()``, ``compatible_tags()``, ``cpython_tags()``, ``generic_tags()`` (`#187 <https://github.com/pypa/packaging/issues/187>`__) + +* Officially support Python 3.8 (`#232 <https://github.com/pypa/packaging/issues/232>`__) + +* Add ``major``, ``minor``, and ``micro`` aliases to ``packaging.version.Version`` (`#226 <https://github.com/pypa/packaging/issues/226>`__) + +* Properly mark ``packaging`` has being fully typed by adding a `py.typed` file (`#226 <https://github.com/pypa/packaging/issues/226>`__) + +19.2 - 2019-09-18 +~~~~~~~~~~~~~~~~~ + +* Remove dependency on ``attrs`` (`#178 <https://github.com/pypa/packaging/issues/178>`__, `#179 <https://github.com/pypa/packaging/issues/179>`__) + +* Use appropriate fallbacks for CPython ABI tag (`#181 <https://github.com/pypa/packaging/issues/181>`__, `#185 <https://github.com/pypa/packaging/issues/185>`__) + +* Add manylinux2014 support (`#186 <https://github.com/pypa/packaging/issues/186>`__) + +* Improve ABI detection (`#181 <https://github.com/pypa/packaging/issues/181>`__) + +* Properly handle debug wheels for Python 3.8 (`#172 <https://github.com/pypa/packaging/issues/172>`__) + +* Improve detection of debug builds on Windows (`#194 <https://github.com/pypa/packaging/issues/194>`__) + +19.1 - 2019-07-30 +~~~~~~~~~~~~~~~~~ + +* Add the ``packaging.tags`` module. 
(`#156 <https://github.com/pypa/packaging/issues/156>`__) + +* Correctly handle two-digit versions in ``python_version`` (`#119 <https://github.com/pypa/packaging/issues/119>`__) + + +19.0 - 2019-01-20 +~~~~~~~~~~~~~~~~~ + +* Fix string representation of PEP 508 direct URL requirements with markers. + +* Better handling of file URLs + + This allows for using ``file:///absolute/path``, which was previously + prevented due to the missing ``netloc``. + + This allows for all file URLs that ``urlunparse`` turns back into the + original URL to be valid. + + +18.0 - 2018-09-26 +~~~~~~~~~~~~~~~~~ + +* Improve error messages when invalid requirements are given. (`#129 <https://github.com/pypa/packaging/issues/129>`__) + + +17.1 - 2017-02-28 +~~~~~~~~~~~~~~~~~ + +* Fix ``utils.canonicalize_version`` when supplying non PEP 440 versions. + + +17.0 - 2017-02-28 +~~~~~~~~~~~~~~~~~ + +* Drop support for python 2.6, 3.2, and 3.3. + +* Define minimal pyparsing version to 2.0.2 (`#91 <https://github.com/pypa/packaging/issues/91>`__). + +* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to + ``Version`` and ``LegacyVersion`` (`#34 <https://github.com/pypa/packaging/issues/34>`__). + +* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to + make it easy to determine if a release is a development release. + +* Add ``utils.canonicalize_version`` to canonicalize version strings or + ``Version`` instances (`#121 <https://github.com/pypa/packaging/issues/121>`__). + + +16.8 - 2016-10-29 +~~~~~~~~~~~~~~~~~ + +* Fix markers that utilize ``in`` so that they render correctly. + +* Fix an erroneous test on Python RC releases. + + +16.7 - 2016-04-23 +~~~~~~~~~~~~~~~~~ + +* Add support for the deprecated ``python_implementation`` marker which was + an undocumented setuptools marker in addition to the newer markers. + + +16.6 - 2016-03-29 +~~~~~~~~~~~~~~~~~ + +* Add support for the deprecated, PEP 345 environment markers in addition to + the newer markers. + + +16.5 - 2016-02-26 +~~~~~~~~~~~~~~~~~ + +* Fix a regression in parsing requirements with whitespaces between the comma + separators. + + +16.4 - 2016-02-22 +~~~~~~~~~~~~~~~~~ + +* Fix a regression in parsing requirements like ``foo (==4)``. + + +16.3 - 2016-02-21 +~~~~~~~~~~~~~~~~~ + +* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when + matching legacy requirements. + + +16.2 - 2016-02-09 +~~~~~~~~~~~~~~~~~ + +* Add a function that implements the name canonicalization from PEP 503. + + +16.1 - 2016-02-07 +~~~~~~~~~~~~~~~~~ + +* Implement requirement specifiers from PEP 508. + + +16.0 - 2016-01-19 +~~~~~~~~~~~~~~~~~ + +* Relicense so that packaging is available under *either* the Apache License, + Version 2.0 or a 2 Clause BSD license. + +* Support installation of packaging when only distutils is available. + +* Fix ``==`` comparison when there is a prefix and a local version in play. + (`#41 <https://github.com/pypa/packaging/issues/41>`__). + +* Implement environment markers from PEP 508. + + +15.3 - 2015-08-01 +~~~~~~~~~~~~~~~~~ + +* Normalize post-release spellings for rev/r prefixes. `#35 <https://github.com/pypa/packaging/issues/35>`__ + + +15.2 - 2015-05-13 +~~~~~~~~~~~~~~~~~ + +* Fix an error where the arbitrary specifier (``===``) was not correctly + allowing pre-releases when it was being used. + +* Expose the specifier and version parts through properties on the + ``Specifier`` classes. + +* Allow iterating over the ``SpecifierSet`` to get access to all of the + ``Specifier`` instances. 
+ +* Allow testing if a version is contained within a specifier via the ``in`` + operator. + + +15.1 - 2015-04-13 +~~~~~~~~~~~~~~~~~ + +* Fix a logic error that was causing inconsistent answers about whether or not + a pre-release was contained within a ``SpecifierSet`` or not. + + +15.0 - 2015-01-02 +~~~~~~~~~~~~~~~~~ + +* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to + make it easy to determine if a release is a post release. + +* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make + it easy to get the public version without any pre or post release markers. + +* Support the update to PEP 440 which removed the implied ``!=V.*`` when using + either ``>V`` or ``<V`` and which instead special cased the handling of + pre-releases, post-releases, and local versions when using ``>V`` or ``<V``. + + +14.5 - 2014-12-17 +~~~~~~~~~~~~~~~~~ + +* Normalize release candidates as ``rc`` instead of ``c``. + +* Expose the ``VERSION_PATTERN`` constant, a regular expression matching + a valid version. + + +14.4 - 2014-12-15 +~~~~~~~~~~~~~~~~~ + +* Ensure that versions are normalized before comparison when used in a + specifier with a less than (``<``) or greater than (``>``) operator. + + +14.3 - 2014-11-19 +~~~~~~~~~~~~~~~~~ + +* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely + handle legacy specifiers as well as PEP 440 specifiers. + +* **BACKWARDS INCOMPATIBLE** Move the specifier support out of + ``packaging.version`` into ``packaging.specifiers``. + + +14.2 - 2014-09-10 +~~~~~~~~~~~~~~~~~ + +* Add prerelease support to ``Specifier``. +* Remove the ability to do ``item in Specifier()`` and replace it with + ``Specifier().contains(item)`` in order to allow flags that signal if a + prerelease should be accepted or not. +* Add a method ``Specifier().filter()`` which will take an iterable and returns + an iterable with items that do not match the specifier filtered out. + + +14.1 - 2014-09-08 +~~~~~~~~~~~~~~~~~ + +* Allow ``LegacyVersion`` and ``Version`` to be sorted together. +* Add ``packaging.version.parse()`` to enable easily parsing a version string + as either a ``Version`` or a ``LegacyVersion`` depending on it's PEP 440 + validity. + + +14.0 - 2014-09-05 +~~~~~~~~~~~~~~~~~ + +* Initial release. + + +.. _`master`: https://github.com/pypa/packaging/ + + diff --git a/setuptools/_vendor/packaging-21.3.dist-info/RECORD b/setuptools/_vendor/packaging-21.3.dist-info/RECORD new file mode 100644 index 00000000..97cace10 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/RECORD @@ -0,0 +1,32 @@ +packaging-21.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-21.3.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-21.3.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-21.3.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-21.3.dist-info/METADATA,sha256=KuKIy6qDLP3svIt6ejCbxBDhvq11ebkgUN55MeyKFyc,15147
+packaging-21.3.dist-info/RECORD,,
+packaging-21.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-21.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+packaging-21.3.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10
+packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661
+packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
+packaging/__pycache__/__about__.cpython-310.pyc,,
+packaging/__pycache__/__init__.cpython-310.pyc,,
+packaging/__pycache__/_manylinux.cpython-310.pyc,,
+packaging/__pycache__/_musllinux.cpython-310.pyc,,
+packaging/__pycache__/_structures.cpython-310.pyc,,
+packaging/__pycache__/markers.cpython-310.pyc,,
+packaging/__pycache__/requirements.cpython-310.pyc,,
+packaging/__pycache__/specifiers.cpython-310.pyc,,
+packaging/__pycache__/tags.cpython-310.pyc,,
+packaging/__pycache__/utils.cpython-310.pyc,,
+packaging/__pycache__/version.cpython-310.pyc,,
+packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
+packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/markers.py,sha256=Fygi3_eZnjQ-3VJizW5AhI5wvo0Hb6RMk4DidsKpOC0,8475
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=rjaGRCMepZS1mlYMjJ5Qh6rfq3gtsCRQUQmftGZ_bu8,4664
+packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110
+packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699
+packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
+packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
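The vendored ``packaging`` 21.3 recorded above supplies the version-handling and specifier utilities its METADATA describes. A minimal illustrative sketch of that public API, reusing the ``pyparsing`` pin shown in the METADATA as the example specifier:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Parse and inspect a PEP 440 version.
v = Version("21.3")
assert v.major == 21 and not v.is_prerelease

# Evaluate versions against a specifier set, e.g. packaging's own
# pyparsing requirement ">=2.0.2, !=3.0.5".
spec = SpecifierSet(">=2.0.2,!=3.0.5")
assert Version("2.2.1") in spec
assert Version("3.0.5") not in spec
```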
diff --git a/setuptools/_vendor/packaging-21.3.dist-info/REQUESTED b/setuptools/_vendor/packaging-21.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/REQUESTED diff --git a/setuptools/_vendor/packaging-21.3.dist-info/WHEEL b/setuptools/_vendor/packaging-21.3.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt b/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt new file mode 100644 index 00000000..748809f7 --- /dev/null +++ b/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt @@ -0,0 +1 @@ +packaging diff --git a/setuptools/_vendor/packaging/__about__.py b/setuptools/_vendor/packaging/__about__.py index c359122f..3551bc2d 100644 --- a/setuptools/_vendor/packaging/__about__.py +++ b/setuptools/_vendor/packaging/__about__.py @@ -17,7 +17,7 @@ __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "21.2" +__version__ = "21.3" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" diff --git a/setuptools/_vendor/packaging/_musllinux.py b/setuptools/_vendor/packaging/_musllinux.py index 85450faf..8ac3059b 100644 --- a/setuptools/_vendor/packaging/_musllinux.py +++ b/setuptools/_vendor/packaging/_musllinux.py @@ -98,7 +98,7 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]: with contextlib.ExitStack() as stack: try: f = stack.enter_context(open(executable, "rb")) - except IOError: + except OSError: return None ld = _parse_ld_musl_from_elf(f) if not ld: diff --git a/setuptools/_vendor/packaging/_structures.py b/setuptools/_vendor/packaging/_structures.py index 95154975..90a6465f 100644 --- a/setuptools/_vendor/packaging/_structures.py +++ b/setuptools/_vendor/packaging/_structures.py @@ -19,9 +19,6 @@ class InfinityType: def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other: object) -> bool: - return not isinstance(other, self.__class__) - def __gt__(self, other: object) -> bool: return True @@ -51,9 +48,6 @@ class NegativeInfinityType: def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) - def __ne__(self, other: object) -> bool: - return not isinstance(other, self.__class__) - def __gt__(self, other: object) -> bool: return False diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py index ce66bd4a..0e218a6f 100644 --- a/setuptools/_vendor/packaging/specifiers.py +++ b/setuptools/_vendor/packaging/specifiers.py @@ -57,13 +57,6 @@ class BaseSpecifier(metaclass=abc.ABCMeta): objects are equal. """ - @abc.abstractmethod - def __ne__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. 
- """ - @abc.abstractproperty def prereleases(self) -> Optional[bool]: """ @@ -119,7 +112,7 @@ class _IndividualSpecifier(BaseSpecifier): else "" ) - return "<{}({!r}{})>".format(self.__class__.__name__, str(self), pre) + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" def __str__(self) -> str: return "{}{}".format(*self._spec) @@ -142,17 +135,6 @@ class _IndividualSpecifier(BaseSpecifier): return self._canonical_spec == other._canonical_spec - def __ne__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - def _get_operator(self, op: str) -> CallableOperator: operator_callable: CallableOperator = getattr( self, f"_compare_{self._operators[op]}" @@ -667,7 +649,7 @@ class SpecifierSet(BaseSpecifier): else "" ) - return "<SpecifierSet({!r}{})>".format(str(self), pre) + return f"<SpecifierSet({str(self)!r}{pre})>" def __str__(self) -> str: return ",".join(sorted(str(s) for s in self._specs)) @@ -706,14 +688,6 @@ class SpecifierSet(BaseSpecifier): return self._specs == other._specs - def __ne__(self, other: object) -> bool: - if isinstance(other, (str, _IndividualSpecifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - def __len__(self) -> int: return len(self._specs) diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py index e65890a9..9a3d25a7 100644 --- a/setuptools/_vendor/packaging/tags.py +++ b/setuptools/_vendor/packaging/tags.py @@ -90,7 +90,7 @@ class Tag: return f"{self._interpreter}-{self._abi}-{self._platform}" def __repr__(self) -> str: - return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + return f"<{self} @ {id(self)}>" def parse_tag(tag: str) -> FrozenSet[Tag]: @@ -192,7 +192,7 @@ def cpython_tags( if not python_version: python_version = sys.version_info[:2] - interpreter = "cp{}".format(_version_nodot(python_version[:2])) + interpreter = f"cp{_version_nodot(python_version[:2])}" if abis is None: if len(python_version) > 1: @@ -268,11 +268,11 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: all previous versions of that major version. 
""" if len(py_version) > 1: - yield "py{version}".format(version=_version_nodot(py_version[:2])) - yield "py{major}".format(major=py_version[0]) + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" if len(py_version) > 1: for minor in range(py_version[1] - 1, -1, -1): - yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + yield f"py{_version_nodot((py_version[0], minor))}" def compatible_tags( @@ -481,4 +481,7 @@ def sys_tags(*, warn: bool = False) -> Iterator[Tag]: else: yield from generic_tags() - yield from compatible_tags() + if interp_name == "pp": + yield from compatible_tags(interpreter="pp3") + else: + yield from compatible_tags() diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst b/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..e1187231 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +UNKNOWN + + diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/INSTALLER b/setuptools/_vendor/pyparsing-2.2.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt new file mode 100644 index 00000000..bbc959e0 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt @@ -0,0 +1,18 @@ +Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA b/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA new file mode 100644 index 00000000..a15c350e --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/METADATA @@ -0,0 +1,30 @@ +Metadata-Version: 2.0 +Name: pyparsing +Version: 2.2.1 +Summary: Python parsing module +Home-page: https://github.com/pyparsing/pyparsing/ +Author: Paul McGuire +Author-email: ptmcg@users.sourceforge.net +License: MIT License +Download-URL: https://pypi.org/project/pyparsing/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* + +UNKNOWN + + diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD b/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD new file mode 100644 index 00000000..09cc30e3 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/RECORD @@ -0,0 +1,11 @@ +__pycache__/pyparsing.cpython-310.pyc,,
+pyparsing-2.2.1.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10
+pyparsing-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pyparsing-2.2.1.dist-info/LICENSE.txt,sha256=081Pq74Spe1XdwrGkewNKSqa078kLIh7UWI-wVjdj8I,1041
+pyparsing-2.2.1.dist-info/METADATA,sha256=I0jhx9vpUYlQXjn4gVDnFFoAt3nNrxwR4iuqA_pknYs,1091
+pyparsing-2.2.1.dist-info/RECORD,,
+pyparsing-2.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyparsing-2.2.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
+pyparsing-2.2.1.dist-info/metadata.json,sha256=v1_77-dSdajUZSItSJg8Ov9M713STY3PzhyrRvs1ax4,1185
+pyparsing-2.2.1.dist-info/top_level.txt,sha256=eUOjGzJVhlQ3WS2rFAy2mN3LX_7FKTM5GSJ04jfnLmU,10
+pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/REQUESTED b/setuptools/_vendor/pyparsing-2.2.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL b/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL new file mode 100644 index 00000000..7332a419 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json b/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json new file mode 100644 index 00000000..b760b766 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "download_url": "https://pypi.org/project/pyparsing/", "extensions": {"python.details": {"contacts": [{"email": "ptmcg@users.sourceforge.net", "name": "Paul McGuire", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pyparsing/pyparsing/"}}}, "generator": "bdist_wheel (0.30.0)", "license": "MIT License", "metadata_version": "2.0", "name": "pyparsing", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*", "summary": "Python parsing module", "version": "2.2.1"}
\ No newline at end of file diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt new file mode 100644 index 00000000..210dfec5 --- /dev/null +++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +pyparsing diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER b/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE new file mode 100644 index 00000000..e859590f --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA new file mode 100644 index 00000000..efd87ecc --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA @@ -0,0 +1,206 @@ +Metadata-Version: 2.1 +Name: tomli +Version: 2.0.1 +Summary: A lil' TOML parser +Keywords: toml +Author-email: Taneli Hukkinen <hukkin@users.noreply.github.com> +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md +Project-URL: Homepage, https://github.com/hukkin/tomli + +[](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[](https://codecov.io/gh/hukkin/tomli) +[](https://pypi.org/project/tomli) + +# Tomli + +> A lil' TOML parser + +**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* + +<!-- mdformat-toc start --slug=github --maxlevel=6 --minlevel=2 --> + +- [Intro](#intro) +- [Installation](#installation) +- [Usage](#usage) + - [Parse a TOML string](#parse-a-toml-string) + - [Parse a TOML file](#parse-a-toml-file) + - [Handle invalid TOML](#handle-invalid-toml) + - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) +- [FAQ](#faq) + - [Why this parser?](#why-this-parser) + - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) + - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) + - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) +- [Performance](#performance) + +<!-- mdformat-toc end --> + +## Intro<a name="intro"></a> + +Tomli is a Python library for parsing [TOML](https://toml.io). +Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). + +## Installation<a name="installation"></a> + +```bash +pip install tomli +``` + +## Usage<a name="usage"></a> + +### Parse a TOML string<a name="parse-a-toml-string"></a> + +```python +import tomli + +toml_str = """ + gretzky = 99 + + [kurri] + jari = 17 + """ + +toml_dict = tomli.loads(toml_str) +assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}} +``` + +### Parse a TOML file<a name="parse-a-toml-file"></a> + +```python +import tomli + +with open("path_to_file/conf.toml", "rb") as f: + toml_dict = tomli.load(f) +``` + +The file must be opened in binary mode (with the `"rb"` flag). +Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, +both of which are required to correctly parse TOML. 
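The binary-mode requirement is enforced by ``tomli.load`` itself: as the vendored ``_parser.load`` later in this diff shows, a file object opened in text mode leads to a ``TypeError``. A minimal sketch of that failure mode, using the same placeholder path as above:

```python
import tomli

# Opening in text mode ("r") instead of binary mode ("rb") is rejected.
with open("path_to_file/conf.toml", encoding="utf-8") as f:
    try:
        tomli.load(f)
    except TypeError as exc:
        # "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`"
        print(exc)
```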
+ +### Handle invalid TOML<a name="handle-invalid-toml"></a> + +```python +import tomli + +try: + toml_dict = tomli.loads("]] this is invalid TOML [[") +except tomli.TOMLDecodeError: + print("Yep, definitely not valid.") +``` + +Note that error messages are considered informational only. +They should not be assumed to stay constant across Tomli versions. + +### Construct `decimal.Decimal`s from TOML floats<a name="construct-decimaldecimals-from-toml-floats"></a> + +```python +from decimal import Decimal +import tomli + +toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) +assert toml_dict["precision-matters"] == Decimal("0.982492") +``` + +Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. +The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. + +Illegal types are `dict` and `list`, and their subtypes. +A `ValueError` will be raised if `parse_float` produces illegal types. + +## FAQ<a name="faq"></a> + +### Why this parser?<a name="why-this-parser"></a> + +- it's lil' +- pure Python with zero dependencies +- the fastest pure Python parser [\*](#performance): + 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/), + 2.4x as fast as [toml](https://pypi.org/project/toml/) +- outputs [basic data types](#how-do-toml-types-map-into-python-types) only +- 100% spec compliant: passes all tests in + [a test set](https://github.com/toml-lang/compliance/pull/8) + soon to be merged to the official + [compliance tests for TOML](https://github.com/toml-lang/compliance) + repository +- thoroughly tested: 100% branch coverage + +### Is comment preserving round-trip parsing supported?<a name="is-comment-preserving-round-trip-parsing-supported"></a> + +No. + +The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. +Preserving comments requires a custom type to be returned so will not be supported, +at least not by the `tomli.loads` and `tomli.load` functions. + +Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. + +### Is there a `dumps`, `write` or `encode` function?<a name="is-there-a-dumps-write-or-encode-function"></a> + +[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. + +The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. + +### How do TOML types map into Python types?<a name="how-do-toml-types-map-into-python-types"></a> + +| TOML type | Python type | Details | +| ---------------- | ------------------- | ------------------------------------------------------------ | +| Document Root | `dict` | | +| Key | `str` | | +| String | `str` | | +| Integer | `int` | | +| Float | `float` | | +| Boolean | `bool` | | +| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | +| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | +| Local Date | `datetime.date` | | +| Local Time | `datetime.time` | | +| Array | `list` | | +| Table | `dict` | | +| Inline Table | `dict` | | + +## Performance<a name="performance"></a> + +The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. +The benchmark can be run with `tox -e benchmark-pypi`. 
+Running the benchmark on my personal computer output the following: + +```console +foo@bar:~/dev/tomli$ tox -e benchmark-pypi +benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2 +benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909' +benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())' +2021-07-23 +benchmark-pypi run-test: commands[1] | python --version +Python 3.8.10 +benchmark-pypi run-test: commands[2] | python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.901 s | baseline (100%) + pytomlpp | 1.08 s | 83.15% + tomli | 3.89 s | 23.15% + toml | 9.36 s | 9.63% + qtoml | 11.5 s | 7.82% + tomlkit | 56.8 s | 1.59% +``` + +The parsers are ordered from fastest to slowest, using the fastest parser as baseline. +Tomli performed the best out of all pure Python TOML parsers, +losing only to pytomlpp (wraps C++) and rtoml (wraps Rust). + diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD new file mode 100644 index 00000000..2d93fa2c --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD @@ -0,0 +1,15 @@ +tomli-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tomli-2.0.1.dist-info/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 +tomli-2.0.1.dist-info/METADATA,sha256=zPDceKmPwJGLWtZykrHixL7WVXWmJGzZ1jyRT5lCoPI,8875 +tomli-2.0.1.dist-info/RECORD,, +tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81 +tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396 +tomli/__pycache__/__init__.cpython-38.pyc,, +tomli/__pycache__/_parser.cpython-38.pyc,, +tomli/__pycache__/_re.cpython-38.pyc,, +tomli/__pycache__/_types.cpython-38.pyc,, +tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633 +tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943 +tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED b/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL new file mode 100644 index 00000000..c727d148 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.6.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_vendor/tomli/__init__.py b/setuptools/_vendor/tomli/__init__.py new file mode 100644 index 00000000..4c6ec97e --- /dev/null +++ b/setuptools/_vendor/tomli/__init__.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. 
+TOMLDecodeError.__module__ = __name__ diff --git a/setuptools/_vendor/tomli/_parser.py b/setuptools/_vendor/tomli/_parser.py new file mode 100644 index 00000000..f1bb0aa1 --- /dev/null +++ b/setuptools/_vendor/tomli/_parser.py @@ -0,0 +1,691 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from collections.abc import Iterable +import string +from types import MappingProxyType +from typing import Any, BinaryIO, NamedTuple + +from ._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from ._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. +ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: + """Parse TOML from a binary file object.""" + b = __fp.read() + try: + s = b.decode() + except AttributeError: + raise TypeError( + "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" + ) from None + return loads(s, parse_float=parse_float) + + +def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = __s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + parse_float = make_safe_parse_float(parse_float) + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. 
+ try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: str | None = src[pos + 1] + except IndexError: + second_char = None + out.flags.finalize_pending() + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. + EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: dict[str, dict] = {} + self._pending_flags: set[tuple[Key, int]] = set() + + def add_pending(self, key: Key, flag: int) -> None: + self._pending_flags.add((key, flag)) + + def finalize_pending(self) -> None: + for key, flag in self._pending_flags: + self.set(key, flag, recursive=False) + self._pending_flags.clear() + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + if not isinstance(list_, list): + raise KeyError("An object other than list found behind this key") + list_.append({}) + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: 
+ while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: frozenset[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... + out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) + for cont_key in relative_path_cont_keys: + # Check that dotted key syntax does not redefine an existing table + if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): + raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + # Containers in the relative path can't be opened with the table syntax or + # dotted key/value syntax in following table sections. 
+ out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + ) + + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Cannot overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, 
nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( + src: str, pos: Pos, *, multiline: bool = False +) -> tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, "Unescaped '\\' in a string") + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Any]: + try: + char: str | None = src[pos] + except IndexError: + char = None + + # IMPORTANT: order conditions based on speed of checking and likelihood + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
+ number_match = RE_NUMBER.match(src, pos) + if number_match: + return number_match.end(), match_to_number(number_match, parse_float) + + # Special floats + first_three = src[pos : pos + 3] + if first_three in {"inf", "nan"}: + return pos + 3, parse_float(first_three) + first_four = src[pos : pos + 4] + if first_four in {"-inf", "+inf", "-nan", "+nan"}: + return pos + 4, parse_float(first_four) + + raise suffixed_err(src, pos, "Invalid value") + + +def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: + """Return a `TOMLDecodeError` where error message is suffixed with + coordinates in source.""" + + def coord_repr(src: str, pos: Pos) -> str: + if pos >= len(src): + return "end of document" + line = src.count("\n", 0, pos) + 1 + if line == 1: + column = pos + 1 + else: + column = pos - src.rindex("\n", 0, pos) + return f"line {line}, column {column}" + + return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + + +def is_unicode_scalar_value(codepoint: int) -> bool: + return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) + + +def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: + """A decorator to make `parse_float` safe. + + `parse_float` must not return dicts or lists, because these types + would be mixed with parsed TOML tables and arrays, thus confusing + the parser. The returned decorated callable raises `ValueError` + instead of returning illegal types. + """ + # The default `float` callable never returns illegal types. Optimize it. + if parse_float is float: # type: ignore[comparison-overlap] + return float + + def safe_parse_float(float_str: str) -> Any: + float_value = parse_float(float_str) + if isinstance(float_value, (dict, list)): + raise ValueError("parse_float must not return dicts or lists") + return float_value + + return safe_parse_float diff --git a/setuptools/_vendor/tomli/_re.py b/setuptools/_vendor/tomli/_re.py new file mode 100644 index 00000000..994bb749 --- /dev/null +++ b/setuptools/_vendor/tomli/_re.py @@ -0,0 +1,107 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from datetime import date, datetime, time, timedelta, timezone, tzinfo +from functools import lru_cache +import re +from typing import Any + +from ._types import ParseFloat + +# E.g. +# - 00:32:00.999999 +# - 00:32:00 +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" + +RE_NUMBER = re.compile( + r""" +0 +(?: + x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex + | + b[01](?:_?[01])* # bin + | + o[0-7](?:_?[0-7])* # oct +) +| +[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part +(?P<floatpart> + (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part +) +""", + flags=re.VERBOSE, +) +RE_LOCALTIME = re.compile(_TIME_RE_STR) +RE_DATETIME = re.compile( + rf""" +([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 +(?: + [Tt ] + {_TIME_RE_STR} + (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset +)? +""", + flags=re.VERBOSE, +) + + +def match_to_datetime(match: re.Match) -> datetime | date: + """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. + + Raises ValueError if the match does not correspond to a valid date + or datetime. 
+ """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_sign_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: tzinfo | None = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: re.Match) -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff --git a/setuptools/_vendor/tomli/_types.py b/setuptools/_vendor/tomli/_types.py new file mode 100644 index 00000000..d949412e --- /dev/null +++ b/setuptools/_vendor/tomli/_types.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] +Pos = int diff --git a/setuptools/_vendor/tomli/py.typed b/setuptools/_vendor/tomli/py.typed new file mode 100644 index 00000000..7632ecf7 --- /dev/null +++ b/setuptools/_vendor/tomli/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE new file mode 100644 index 00000000..583f9f6e --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). 
In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. 
By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA new file mode 100644 index 00000000..fe10dfd0 --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA @@ -0,0 +1,35 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.0.1 +Summary: Backported and Experimental Type Hints for Python 3.6+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" <levkivskyi@gmail.com> +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development +Project-URL: Home, https://github.com/python/typing/blob/master/typing_extensions/README.rst + +Typing Extensions -- Backported and Experimental Type Hints for Python + +The ``typing`` module was added to the standard library in Python 3.5, but +many new features have been added to the module since then. +This means users of older Python versions who are unable to upgrade will not be +able to take advantage of new types added to the ``typing`` module, such as +``typing.Protocol`` or ``typing.TypedDict``. + +The ``typing_extensions`` module contains backports of these changes. +Experimental types that may eventually be added to the ``typing`` +module are also included in ``typing_extensions``. 
+ diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD new file mode 100644 index 00000000..9a7f6007 --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/typing_extensions.cpython-310.pyc,,
+typing_extensions-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typing_extensions-4.0.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
+typing_extensions-4.0.1.dist-info/METADATA,sha256=iZ_5HONZZBXtF4kroz-IPZYIl9M8IE1B00R82dWcBqE,1736
+typing_extensions-4.0.1.dist-info/RECORD,,
+typing_extensions-4.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+typing_extensions-4.0.1.dist-info/WHEEL,sha256=LVOPL_YDMEiGvRLgDK1hLkfhFCnTcxcAYZJtpNFses0,81
+typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL new file mode 100644 index 00000000..884ceb56 --- /dev/null +++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.5.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_vendor/typing_extensions.py b/setuptools/_vendor/typing_extensions.py new file mode 100644 index 00000000..9f1c7aa3 --- /dev/null +++ b/setuptools/_vendor/typing_extensions.py @@ -0,0 +1,2296 @@ +import abc +import collections +import collections.abc +import operator +import sys +import typing + +# After PEP 560, internal typing API was substantially reworked. +# This is especially important for Protocol class which uses internal APIs +# quite extensively. +PEP_560 = sys.version_info[:3] >= (3, 7, 0) + +if PEP_560: + GenericMeta = type +else: + # 3.6 + from typing import GenericMeta, _type_vars # noqa + +# The two functions below are copies of typing internal helpers. +# They are needed by _ProtocolMeta + + +def _no_slots_copy(dct): + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +def _check_generic(cls, parameters): + if not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};" + f" actual {alen}, expected {elen}") + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'ClassVar', + 'Concatenate', + 'Final', + 'ParamSpec', + 'Self', + 'Type', + + # ABCs (from collections.abc). + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', + + # Concrete collection types. + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsIndex', + + # One-off things. + 'Annotated', + 'final', + 'IntVar', + 'Literal', + 'NewType', + 'overload', + 'Protocol', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeGuard', + 'TYPE_CHECKING', +] + +if PEP_560: + __all__.extend(["get_args", "get_origin", "get_type_hints"]) + +# 3.6.2+ +if hasattr(typing, 'NoReturn'): + NoReturn = typing.NoReturn +# 3.6.0-3.6.1 +else: + class _NoReturn(typing._FinalTypingBase, _root=True): + """Special type indicating functions that never return. + Example:: + + from typing import NoReturn + + def stop() -> NoReturn: + raise Exception('no way') + + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + NoReturn = _NoReturn(_root=True) + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. 
+VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +ClassVar = typing.ClassVar + +# On older versions of typing there is an internal class named "Final". +# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +elif sys.version_info[:2] >= (3, 7): + class _FinalForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") +# 3.6 +else: + class _Final(typing._FinalTypingBase, _root=True): + """A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties. + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(typing._type_check(item, + f'{cls.__name__[1:]} accepts only single type.'), + _root=True) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += f'[{typing._type_repr(self.__type__)}]' + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _Final): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + Final = _Final(_root=True) + + +# 3.8+ +if hasattr(typing, 'final'): + final = typing.final +# 3.6-3.7 +else: + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. + """ + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# 3.8+: +if hasattr(typing, 'Literal'): + Literal = typing.Literal +# 3.7: +elif sys.version_info[:2] >= (3, 7): + class _LiteralForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' 
+ self._name + + def __getitem__(self, parameters): + return typing._GenericAlias(self, parameters) + + Literal = _LiteralForm('Literal', + doc="""A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") +# 3.6: +else: + class _Literal(typing._FinalTypingBase, _root=True): + """A type that can be used to indicate to type checkers that the + corresponding value has a value literally equivalent to the + provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to the + value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime checking + verifying that the parameter is actually a value instead of a type. + """ + + __slots__ = ('__values__',) + + def __init__(self, values=None, **kwds): + self.__values__ = values + + def __getitem__(self, values): + cls = type(self) + if self.__values__ is None: + if not isinstance(values, tuple): + values = (values,) + return cls(values, _root=True) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + + def _eval_type(self, globalns, localns): + return self + + def __repr__(self): + r = super().__repr__() + if self.__values__ is not None: + r += f'[{", ".join(map(typing._type_repr, self.__values__))}]' + return r + + def __hash__(self): + return hash((type(self).__name__, self.__values__)) + + def __eq__(self, other): + if not isinstance(other, _Literal): + return NotImplemented + if self.__values__ is not None: + return self.__values__ == other.__values__ + return self is other + + Literal = _Literal(_root=True) + + +_overload_dummy = typing._overload_dummy # noqa +overload = typing.overload + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + + +class _ExtensionsGenericMeta(GenericMeta): + def __subclasscheck__(self, subclass): + """This mimics a more modern GenericMeta.__subclasscheck__() logic + (that does not have problems with recursion) to work around interactions + between collections, typing, and typing_extensions on older + versions of Python, see https://github.com/python/typing/issues/501. 
+ """ + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if not self.__extra__: + return super().__subclasscheck__(subclass) + res = self.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if self.__extra__ in subclass.__mro__: + return True + for scls in self.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return False + + +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator + +# 3.6.1+ +if hasattr(typing, 'Deque'): + Deque = typing.Deque +# 3.6.0 +else: + class Deque(collections.deque, typing.MutableSequence[T], + metaclass=_ExtensionsGenericMeta, + extra=collections.deque): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Deque: + return collections.deque(*args, **kwds) + return typing._generic_new(collections.deque, cls, *args, **kwds) + +ContextManager = typing.ContextManager +# 3.6.2+ +if hasattr(typing, 'AsyncContextManager'): + AsyncContextManager = typing.AsyncContextManager +# 3.6.0-3.6.1 +else: + from _collections_abc import _check_methods as _check_methods_in_mro # noqa + + class AsyncContextManager(typing.Generic[T_co]): + __slots__ = () + + async def __aenter__(self): + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncContextManager: + return _check_methods_in_mro(C, "__aenter__", "__aexit__") + return NotImplemented + +DefaultDict = typing.DefaultDict + +# 3.7.2+ +if hasattr(typing, 'OrderedDict'): + OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 +elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) +# 3.6 +else: + class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.OrderedDict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is OrderedDict: + return collections.OrderedDict(*args, **kwds) + return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) + +# 3.6.2+ +if hasattr(typing, 'Counter'): + Counter = typing.Counter +# 3.6.0-3.6.1 +else: + class Counter(collections.Counter, + typing.Dict[T, int], + metaclass=_ExtensionsGenericMeta, extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Counter: + return collections.Counter(*args, **kwds) + return typing._generic_new(collections.Counter, cls, *args, **kwds) + +# 3.6.1+ +if hasattr(typing, 'ChainMap'): + ChainMap = typing.ChainMap +elif hasattr(collections, 'ChainMap'): + class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return collections.ChainMap(*args, **kwds) + return typing._generic_new(collections.ChainMap, cls, *args, **kwds) + +# 3.6.1+ +if hasattr(typing, 'AsyncGenerator'): + AsyncGenerator = typing.AsyncGenerator +# 3.6.0 +else: + class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], + metaclass=_ExtensionsGenericMeta, + extra=collections.abc.AsyncGenerator): + __slots__ = () + +NewType = typing.NewType 
+Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +def _gorg(cls): + """This function exists for compatibility with old typing versions.""" + assert isinstance(cls, GenericMeta) + if hasattr(cls, '_gorg'): + return cls._gorg + while cls.__origin__ is not None: + cls = cls.__origin__ + return cls + + +_PROTO_WHITELIST = ['Callable', 'Awaitable', + 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + 'ContextManager', 'AsyncContextManager'] + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in ('Protocol', 'Generic'): + continue + annotations = getattr(base, '__annotations__', {}) + for attr in list(base.__dict__.keys()) + list(annotations.keys()): + if (not attr.startswith('_abc_') and attr not in ( + '__abstractmethods__', '__annotations__', '__weakref__', + '_is_protocol', '_is_runtime_protocol', '__dict__', + '__args__', '__slots__', + '__next_in_mro__', '__parameters__', '__origin__', + '__orig_bases__', '__extra__', '__tree_hash__', + '__doc__', '__subclasshook__', '__init__', '__new__', + '__module__', '_MutableMapping__marker', '_gorg')): + attrs.add(attr) + return attrs + + +def _is_callable_members_only(cls): + return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) + + +# 3.8+ +if hasattr(typing, 'Protocol'): + Protocol = typing.Protocol +# 3.7 +elif PEP_560: + from typing import _collect_type_vars # noqa + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(abc.ABCMeta): + # This metaclass is a bit unfortunate and exists only because of the lack + # of __instancehook__. + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): + return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) + + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... 
+ """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + tvars = [] + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + if '__orig_bases__' in cls.__dict__: + tvars = _collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. + gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. 
+ def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init +# 3.6 +else: + from typing import _next_in_mro, _type_check # noqa + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(GenericMeta): + """Internal metaclass for Protocol. + + This exists so Protocol classes can be generic without deriving + from Generic. + """ + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + # This is just a version copied from GenericMeta.__new__ that + # includes "Protocol" special treatment. (Comments removed for brevity.) + assert extra is None # Protocols should not have extra + if tvars is not None: + assert origin is not None + assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars + else: + tvars = _type_vars(bases) + gvars = None + for base in bases: + if base is typing.Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ in (typing.Generic, Protocol)): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] or" + " Protocol[...] 
multiple times.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) + s_args = ", ".join(str(g) for g in gvars) + cls_name = "Generic" if any(b.__origin__ is typing.Generic + for b in bases) else "Protocol" + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {cls_name}[{s_args}]") + tvars = gvars + + initial_bases = bases + if (extra is not None and type(extra) is abc.ABCMeta and + extra not in bases): + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b + for b in bases) + if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): + bases = tuple(b for b in bases if b is not typing.Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, + _root=True) + super(GenericMeta, self).__setattr__('_gorg', + self if not origin else + _gorg(origin)) + self.__parameters__ = tvars + self.__args__ = tuple(... if a is typing._TypingEllipsis else + () if a is typing._TypingEmpty else + a for a in args) if args else None + self.__next_in_mro__ = _next_in_mro(self) + if orig_bases is None: + self.__orig_bases__ = initial_bases + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + if hasattr(self, '_subs_tree'): + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol or + isinstance(b, _ProtocolMeta) and + b.__origin__ is Protocol + for b in cls.__bases__) + if cls._is_protocol: + for base in cls.__mro__[1:]: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, typing.TypingMeta) and base._is_protocol or + isinstance(base, GenericMeta) and + base.__origin__ is typing.Generic): + raise TypeError(f'Protocols can only inherit from other' + f' protocols, got {repr(base)}') + + cls.__init__ = _no_init + + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + def __instancecheck__(self, instance): + # We need this method for situations where attributes are + # assigned in __init__. 
+ if ((not getattr(self, '_is_protocol', False) or + _is_callable_members_only(self)) and + issubclass(instance.__class__, self)): + return True + if self._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(self, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(self)): + return True + return super(GenericMeta, self).__instancecheck__(instance) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if (self.__dict__.get('_is_protocol', None) and + not self.__dict__.get('_is_runtime_protocol', None)): + if sys._getframe(1).f_globals['__name__'] in ['abc', + 'functools', + 'typing']: + return False + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if (self.__dict__.get('_is_runtime_protocol', None) and + not _is_callable_members_only(self)): + if sys._getframe(1).f_globals['__name__'] in ['abc', + 'functools', + 'typing']: + return super(GenericMeta, self).__subclasscheck__(cls) + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + return super(GenericMeta, self).__subclasscheck__(cls) + + @typing._tp_cache + def __getitem__(self, params): + # We also need to copy this from GenericMeta.__getitem__ to get + # special treatment of "Protocol". (Comments removed for brevity.) + if not isinstance(params, tuple): + params = (params,) + if not params and _gorg(self) is not typing.Tuple: + raise TypeError( + f"Parameter list to {self.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self in (typing.Generic, Protocol): + if not all(isinstance(p, typing.TypeVar) for p in params): + raise TypeError( + f"Parameters to {repr(self)}[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + f"Parameters to {repr(self)}[...] must all be unique") + tvars = params + args = params + elif self in (typing.Tuple, typing.Callable): + tvars = _type_vars(params) + args = params + elif self.__origin__ in (typing.Generic, Protocol): + raise TypeError(f"Cannot subscript already-subscripted {repr(self)}") + else: + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + class Protocol(metaclass=_ProtocolMeta): + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... 
+ """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if _gorg(cls) is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can be used only as a base class") + return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +# 3.8+ +if hasattr(typing, 'runtime_checkable'): + runtime_checkable = typing.runtime_checkable +# 3.6-3.7 +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + return cls + + +# Exists for backwards compatibility. +runtime = runtime_checkable + + +# 3.8+ +if hasattr(typing, 'SupportsIndex'): + SupportsIndex = typing.SupportsIndex +# 3.6-3.7 +else: + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + +if sys.version_info >= (3, 9, 2): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + TypedDict = typing.TypedDict +else: + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', + 'functools', + 'typing']: + # Typed dicts are only for static structural subtyping. 
+ raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False + + def _dict_new(*args, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + return dict(*args, **kwargs) + + _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + if args: + typename, args = args[0], args[1:] # allow the "_typename" keyword be passed + elif '_typename' in kwargs: + typename = kwargs.pop('_typename') + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional " + "argument: '_typename'") + if args: + try: + fields, = args # allow the "_fields" keyword be passed + except ValueError: + raise TypeError('TypedDict.__new__() takes from 2 to 3 ' + f'positional arguments but {len(args) + 2} ' + 'were given') + elif '_fields' in kwargs and len(kwargs) == 1: + fields = kwargs.pop('_fields') + import warnings + warnings.warn("Passing '_fields' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + fields = None + + if fields is None: + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(fields)} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' + ' /, *, total=True, **kwargs)') + + class _TypedDictMeta(type): + def __init__(cls, name, bases, ns, total=True): + super().__init__(name, bases, ns) + + def __new__(cls, name, bases, ns, total=True): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. 
+ ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + tp_dict = super().__new__(cls, name, (dict,), ns) + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + own_annotation_keys = set(own_annotations.keys()) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + own_annotations = { + n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + if total: + required_keys.update(own_annotation_keys) + else: + optional_keys.update(own_annotation_keys) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __instancecheck__ = __subclasscheck__ = _check_fails + + TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key + associated with a value of a consistent type. This expectation + is not checked at runtime but is only enforced by type checkers. + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports two additional equivalent forms:: + + Point2D = TypedDict('Point2D', x=int, y=int, label=str) + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + The class syntax is only supported in Python 3.6+, while two other + syntax forms work for Python 2.7 and 3.2+ + """ + + +# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + get_type_hints = typing.get_type_hints + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 +elif PEP_560: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. 
+ """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation).") + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + + def _strip_annotations(t): + """Strips the annotations from a given type. + """ + if isinstance(t, _AnnotatedAlias): + return _strip_annotations(t.__origin__) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_annotations(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + res = t.copy_with(stripped_args) + res._special = t._special + return res + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. 
For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_annotations(t) for k, t in hint.items()} +# 3.6 +else: + + def _is_dunder(name): + """Returns True if name is a __dunder_variable_name__.""" + return len(name) > 4 and name.startswith('__') and name.endswith('__') + + # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality + # checks, argument expansion etc. are done on the _subs_tre. As a result we + # can't provide a get_type_hints function that strips out annotations. + + class AnnotatedMeta(typing.GenericMeta): + """Metaclass for Annotated""" + + def __new__(cls, name, bases, namespace, **kwargs): + if any(b is not object for b in bases): + raise TypeError("Cannot subclass " + str(Annotated)) + return super().__new__(cls, name, bases, namespace, **kwargs) + + @property + def __metadata__(self): + return self._subs_tree()[2] + + def _tree_repr(self, tree): + cls, origin, metadata = tree + if not isinstance(origin, tuple): + tp_repr = typing._type_repr(origin) + else: + tp_repr = origin[0]._tree_repr(origin) + metadata_reprs = ", ".join(repr(arg) for arg in metadata) + return f'{cls}[{tp_repr}, {metadata_reprs}]' + + def _subs_tree(self, tvars=None, args=None): # noqa + if self is Annotated: + return Annotated + res = super()._subs_tree(tvars=tvars, args=args) + # Flatten nested Annotated + if isinstance(res[1], tuple) and res[1][0] is Annotated: + sub_tp = res[1][1] + sub_annot = res[1][2] + return (Annotated, sub_tp, sub_annot + res[2]) + return res + + def _get_cons(self): + """Return the class used to create instance of this type.""" + if self.__origin__ is None: + raise TypeError("Cannot get the underlying type of a " + "non-specialized Annotated type.") + tree = self._subs_tree() + while isinstance(tree, tuple) and tree[0] is Annotated: + tree = tree[1] + if isinstance(tree, tuple): + return tree[0] + else: + return tree + + @typing._tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if self.__origin__ is not None: # specializing an instantiated type + return super().__getitem__(params) + elif not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be instantiated " + "with at least two arguments (a type and an " + "annotation).") + else: + msg = "Annotated[t, ...]: t must be a type." + tp = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return self.__class__( + self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars((tp,)), + # Metadata is a tuple so it won't be touched by _replace_args et al. 
+ args=(tp, metadata), + origin=self, + ) + + def __call__(self, *args, **kwargs): + cons = self._get_cons() + result = cons(*args, **kwargs) + try: + result.__orig_class__ = self + except AttributeError: + pass + return result + + def __getattr__(self, attr): + # For simplicity we just don't relay all dunder names + if self.__origin__ is not None and not _is_dunder(attr): + return getattr(self._get_cons(), attr) + raise AttributeError(attr) + + def __setattr__(self, attr, value): + if _is_dunder(attr) or attr.startswith('_abc_'): + super().__setattr__(attr, value) + elif self.__origin__ is None: + raise AttributeError(attr) + else: + setattr(self._get_cons(), attr, value) + + def __instancecheck__(self, obj): + raise TypeError("Annotated cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Annotated cannot be used with issubclass().") + + class Annotated(metaclass=AnnotatedMeta): + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type, the remaining + arguments are kept as a tuple in the __metadata__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.7-3.9 +elif PEP_560: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias + except ImportError: + GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. 
+ Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeAliasForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 +elif sys.version_info[:2] >= (3, 7): + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + TypeAlias = _TypeAliasForm('TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""") +# 3.6 +else: + class _TypeAliasMeta(typing.TypingMeta): + """Metaclass for TypeAlias""" + + def __repr__(self): + return 'typing_extensions.TypeAlias' + + class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("TypeAlias cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("TypeAlias cannot be used with issubclass().") + + def __repr__(self): + return 'typing_extensions.TypeAlias' + + TypeAlias = _TypeAliasBase(_root=True) + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.6-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+           P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+# 3.10+
+if hasattr(typing, 'ParamSpec'):
+    ParamSpec = typing.ParamSpec
+# 3.6-3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers. They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators. They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types. An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types. These keyword arguments are valid, but their actual semantics
+        are yet to be decided. See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
+            super().__init__([self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+
+            # for pickling:
+            try:
+                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
+            except (AttributeError, ValueError):
+                def_mod = None
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        if not PEP_560:
+            # Only needed in 3.6.
+            def _get_type_vars(self, tvars):
+                if self not in tvars:
+                    tvars.append(self)
+
+
+# 3.6-3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + if PEP_560: + __class__ = typing._GenericAlias + else: + __class__ = typing._TypingBase + + # Flag in 3.8. + _special = False + # Attribute in 3.6 and earlier. + _gorg = typing.Generic + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + if not PEP_560: + # Only required in 3.6. + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + typing._get_type_vars(self.__parameters__, tvars) + + +# 3.6-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_TypeAliasForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) +# 3.7-8 +elif sys.version_info[:2] >= (3, 7): + class _ConcatenateForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """) +# 3.6 +else: + class _ConcatenateAliasMeta(typing.TypingMeta): + """Metaclass for Concatenate.""" + + def __repr__(self): + return 'typing_extensions.Concatenate' + + class _ConcatenateAliasBase(typing._FinalTypingBase, + metaclass=_ConcatenateAliasMeta, + _root=True): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. 
+ """ + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Concatenate cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Concatenate cannot be used with issubclass().") + + def __repr__(self): + return 'typing_extensions.Concatenate' + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateAliasBase(_root=True) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeGuardForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeGuardForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 +elif sys.version_info[:2] >= (3, 7): + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. 
The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) +# 3.6 +else: + class _TypeGuard(typing._FinalTypingBase, _root=True): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). 
+ """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(typing._type_check(item, + f'{cls.__name__[1:]} accepts only a single type.'), + _root=True) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += f'[{typing._type_repr(self.__type__)}]' + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _TypeGuard): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + TypeGuard = _TypeGuard(_root=True) + +if hasattr(typing, "Self"): + Self = typing.Self +elif sys.version_info[:2] >= (3, 7): + # Vendored from cpython typing._SpecialFrom + class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") +else: + class _Self(typing._FinalTypingBase, _root=True): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass().") + + Self = _Self(_root=True) + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. 
For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + +elif sys.version_info[:2] >= (3, 7): + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + '{} accepts only single type'.format(self._name)) + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) +else: + # NOTE: Modeled after _Final's implementation when _FinalTypingBase available + class _MaybeRequired(typing._FinalTypingBase, _root=True): + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(typing._type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, type(self)): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + class _Required(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. 
+ """ + + class _NotRequired(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + + Required = _Required(_root=True) + NotRequired = _NotRequired(_root=True) diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt index 8216ec99..b08b0d6f 100644 --- a/setuptools/_vendor/vendored.txt +++ b/setuptools/_vendor/vendored.txt @@ -1,4 +1,13 @@ -packaging==21.2 +packaging==21.3 pyparsing==2.2.1 ordered-set==3.1.1 more_itertools==8.8.0 +jaraco.text==3.7.0 +importlib_resources==5.4.0 +importlib_metadata==4.11.1 +nspektr==0.3.0 +# required for importlib_metadata on older Pythons +typing_extensions==4.0.1 +# required for importlib_resources and _metadata on older Pythons +zipp==3.7.0 +tomli==2.0.1 diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/INSTALLER b/setuptools/_vendor/zipp-3.7.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE b/setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA b/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA new file mode 100644 index 00000000..b1308b5f --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.1 +Name: zipp +Version: 3.7.0 +Summary: Backport of pathlib-compatible object wrapper for zip files +Home-page: https://github.com/jaraco/zipp +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: jaraco.itertools ; extra == 'testing' +Requires-Dist: func-timeout ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/zipp.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/zipp.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/zipp + +.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg + :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2021-informational + :target: https://blog.jaraco.com/skeleton + + +A pathlib-compatible Zipfile object wrapper. Official backport of the standard library +`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_. + + diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD new file mode 100644 index 00000000..38d0b21a --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/zipp.cpython-310.pyc,,
+zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
+zipp-3.7.0.dist-info/RECORD,,
+zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
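The RECORD file above pairs each vendored path with an unpadded urlsafe-base64 SHA-256 digest and a size in bytes, following the installed-distributions record format used for wheels. A minimal sketch of how such an entry can be recomputed to verify a vendored file; the helper name and the local path are illustrative only and not part of this changeset:

import base64
import hashlib

def record_entry(path):
    """Return 'path,sha256=<digest>,<size>' in the style used by RECORD files."""
    with open(path, 'rb') as f:
        data = f.read()
    # RECORD digests are urlsafe base64 without '=' padding.
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=')
    return "{},sha256={},{}".format(path, digest.decode('ascii'), len(data))

# For a file identical to the vendored copy, the digest and size returned by
# record_entry('zipp.py') should match the corresponding line above.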
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED b/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL b/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt new file mode 100644 index 00000000..e82f676f --- /dev/null +++ b/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zipp diff --git a/setuptools/_vendor/zipp.py b/setuptools/_vendor/zipp.py new file mode 100644 index 00000000..26b723c1 --- /dev/null +++ b/setuptools/_vendor/zipp.py @@ -0,0 +1,329 @@ +import io +import posixpath +import zipfile +import itertools +import contextlib +import sys +import pathlib + +if sys.version_info < (3, 7): + from collections import OrderedDict +else: + OrderedDict = dict + + +__all__ = ['Path'] + + +def _parents(path): + """ + Given a path with elements separated by + posixpath.sep, generate all parents of that path. + + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = OrderedDict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class CompleteDirs(zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. 
+ """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(_pathlib_compat(source)) + + # Only allow for FastLookup when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. + """ + + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super(FastLookup, self).namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super(FastLookup, self)._name_set() + return self.__lookup + + +def _pathlib_compat(path): + """ + For path-like objects, convert to a filename for compatibility + on Python 3.6.1 and earlier. + """ + try: + return path.__fspath__() + except AttributeError: + return str(path) + + +class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = zipfile.ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. + + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text() + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ + + __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=""): + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) + self.at = at + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). 
+ """ + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists() and zip_mode == 'r': + raise FileNotFoundError(self) + stream = self.root.open(self.at, zip_mode, pwd=pwd) + if 'b' in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + return io.TextIOWrapper(stream, *args, **kwargs) + + @property + def name(self): + return pathlib.Path(self.at).name or self.filename.name + + @property + def suffix(self): + return pathlib.Path(self.at).suffix or self.filename.suffix + + @property + def suffixes(self): + return pathlib.Path(self.at).suffixes or self.filename.suffixes + + @property + def stem(self): + return pathlib.Path(self.at).stem or self.filename.stem + + @property + def filename(self): + return pathlib.Path(self.root.filename).joinpath(self.at) + + def read_text(self, *args, **kwargs): + with self.open('r', *args, **kwargs) as strm: + return strm.read() + + def read_bytes(self): + with self.open('rb') as strm: + return strm.read() + + def _is_child(self, path): + return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") + + def _next(self, at): + return self.__class__(self.root, at) + + def is_dir(self): + return not self.at or self.at.endswith("/") + + def is_file(self): + return self.exists() and not self.is_dir() + + def exists(self): + return self.at in self.root._name_set() + + def iterdir(self): + if not self.is_dir(): + raise ValueError("Can't listdir a file") + subs = map(self._next, self.root.namelist()) + return filter(self._is_child, subs) + + def __str__(self): + return posixpath.join(self.root.filename, self.at) + + def __repr__(self): + return self.__repr.format(self=self) + + def joinpath(self, *other): + next = posixpath.join(self.at, *map(_pathlib_compat, other)) + return self._next(self.root.resolve_dir(next)) + + __truediv__ = joinpath + + @property + def parent(self): + if not self.at: + return self.filename.parent + parent_at = posixpath.dirname(self.at.rstrip('/')) + if parent_at: + parent_at += '/' + return self._next(parent_at) diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py index 0f702848..d8e10c13 100644 --- a/setuptools/archive_util.py +++ b/setuptools/archive_util.py @@ -8,7 +8,7 @@ import posixpath import contextlib from distutils.errors import DistutilsError -from pkg_resources import ensure_directory +from ._path import ensure_directory __all__ = [ "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", @@ -100,29 +100,37 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): raise UnrecognizedFormat("%s is not a zip file" % (filename,)) with zipfile.ZipFile(filename) as z: - for info in z.infolist(): - name = info.filename + _unpack_zipfile_obj(z, extract_dir, progress_filter) - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - with open(target, 'wb') as f: - f.write(data) - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) +def _unpack_zipfile_obj(zipfile_obj, extract_dir, progress_filter=default_filter): + """Internal/private API used by other parts of setuptools. 
+ Similar to ``unpack_zipfile``, but receives an already opened :obj:`zipfile.ZipFile` + object instead of a filename. + """ + for info in zipfile_obj.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = zipfile_obj.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) def _resolve_tar_file_or_dir(tar_obj, tar_member_obj): diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index d0ac613b..5dc65e2d 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -37,8 +37,9 @@ import warnings import setuptools import distutils +from ._reqs import parse_strings +from .extern.more_itertools import always_iterable -from pkg_resources import parse_requirements __all__ = ['get_requires_for_build_sdist', 'get_requires_for_build_wheel', @@ -56,7 +57,7 @@ class SetupRequirementsError(BaseException): class Distribution(setuptools.dist.Distribution): def fetch_build_eggs(self, specifiers): - specifier_list = list(map(str, parse_requirements(specifiers))) + specifier_list = list(parse_strings(specifiers)) raise SetupRequirementsError(specifier_list) @@ -126,11 +127,26 @@ def suppress_known_deprecation(): yield -class _BuildMetaBackend(object): +class _BuildMetaBackend: - def _fix_config(self, config_settings): + @staticmethod + def _fix_config(config_settings): + """ + Ensure config settings meet certain expectations. 
+ + >>> fc = _BuildMetaBackend._fix_config + >>> fc(None) + {'--global-option': []} + >>> fc({}) + {'--global-option': []} + >>> fc({'--global-option': 'foo'}) + {'--global-option': ['foo']} + >>> fc({'--global-option': ['foo']}) + {'--global-option': ['foo']} + """ config_settings = config_settings or {} - config_settings.setdefault('--global-option', []) + config_settings['--global-option'] = list(always_iterable( + config_settings.get('--global-option'))) return config_settings def _get_build_requires(self, config_settings, requirements): @@ -158,12 +174,10 @@ class _BuildMetaBackend(object): exec(compile(code, __file__, 'exec'), locals()) def get_requires_for_build_wheel(self, config_settings=None): - config_settings = self._fix_config(config_settings) return self._get_build_requires( config_settings, requirements=['wheel']) def get_requires_for_build_sdist(self, config_settings=None): - config_settings = self._fix_config(config_settings) return self._get_build_requires(config_settings, requirements=[]) def prepare_metadata_for_build_wheel(self, metadata_directory, diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index e6b1609f..11a1c6be 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -11,9 +11,10 @@ import re import textwrap import marshal -from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import get_build_platform, Distribution from setuptools.extension import Library from setuptools import Command +from .._path import ensure_directory from sysconfig import get_path, get_python_version diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index c45258fa..8b8509f3 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -4,9 +4,13 @@ As defined in the wheel specification """ import os +import re +import warnings +from inspect import cleandoc from distutils.core import Command from distutils import log +from setuptools.extern import packaging class dist_info(Command): @@ -29,8 +33,36 @@ class dist_info(Command): egg_info.egg_base = self.egg_base egg_info.finalize_options() egg_info.run() - dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info' + name = _safe(self.distribution.get_name()) + version = _version(self.distribution.get_version()) + base = self.egg_base or os.curdir + dist_info_dir = os.path.join(base, f"{name}-{version}.dist-info") log.info("creating '{}'".format(os.path.abspath(dist_info_dir))) bdist_wheel = self.get_finalized_command('bdist_wheel') bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir) + + +def _safe(component: str) -> str: + """Escape a component used to form a wheel name according to PEP 491""" + return re.sub(r"[^\w\d.]+", "_", component) + + +def _version(version: str) -> str: + """Convert an arbitrary string to a version string.""" + v = version.replace(' ', '.') + try: + return str(packaging.version.Version(v)).replace("-", "_") + except packaging.version.InvalidVersion: + msg = f"""!!\n\n + ################### + # Invalid version # + ################### + {version!r} is not valid according to PEP 440.\n + Please make sure specify a valid version for your package. + Also note that future releases of setuptools may halt the build process + if an invalid version is given. + \n\n!! 
+ """ + warnings.warn(cleandoc(msg)) + return _safe(v).strip("_") diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index d7ea033b..302463f3 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -17,10 +17,10 @@ from distutils.errors import ( DistutilsArgError, DistutilsOptionError, DistutilsError, DistutilsPlatformError, ) -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS from distutils import log, dir_util from distutils.command.build_scripts import first_line_re from distutils.spawn import find_executable +from distutils.command import install import sys import os import zipimport @@ -39,9 +39,10 @@ import subprocess import shlex import io import configparser +import sysconfig -from sysconfig import get_config_vars, get_path +from sysconfig import get_path from setuptools import SetuptoolsDeprecationWarning @@ -54,18 +55,21 @@ from setuptools.package_index import ( from setuptools.command import bdist_egg from setuptools.wheel import Wheel from pkg_resources import ( - yield_lines, normalize_path, resource_string, ensure_directory, + normalize_path, resource_string, get_distribution, find_distributions, Environment, Requirement, Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, VersionConflict, DEVELOP_DIST, ) import pkg_resources +from .._path import ensure_directory +from ..extern.jaraco.text import yield_lines + # Turn on PEP440Warnings warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) __all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'easy_install', 'PthDistributions', 'extract_wininst_cfg', 'get_exe_prefixes', ] @@ -74,22 +78,6 @@ def is_64bit(): return struct.calcsize("P") == 8 -def samefile(p1, p2): - """ - Determine if two paths reference the same file. - - Augments os.path.samefile to work on Windows and - suppresses errors if the path doesn't exist. 
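The ``samefile`` helper removed here is superseded later in this patch by an inline check; a minimal sketch of that guarded pattern (the function name below is hypothetical):

```python
import os


def same_existing_file(src: str, dst: str) -> bool:
    # Guarded pattern that replaces the old helper: only ask os.path.samefile
    # once the destination is known to exist (the source is assumed to exist).
    return os.path.exists(dst) and os.path.samefile(src, dst)
```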
- """ - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - - def _to_bytes(s): return s.encode('utf8') @@ -180,12 +168,8 @@ class easy_install(Command): self.install_data = None self.install_base = None self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE self.no_find_links = None # Options not specifiable via command line @@ -235,28 +219,38 @@ class easy_install(Command): self.version and self._render_version() py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - self.config_vars = { + self.config_vars = dict(sysconfig.get_config_vars()) + + self.config_vars.update({ 'dist_name': self.distribution.get_name(), 'dist_version': self.distribution.get_version(), 'dist_fullname': self.distribution.get_fullname(), 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, + 'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}', + 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}', + 'sys_prefix': self.config_vars['prefix'], + 'sys_exec_prefix': self.config_vars['exec_prefix'], # Only python 3.2+ has abiflags 'abiflags': getattr(sys, 'abiflags', ''), - } - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite + 'platlibdir': getattr(sys, 'platlibdir', 'lib'), + }) + with contextlib.suppress(AttributeError): + # only for distutils outside stdlib + self.config_vars.update({ + 'implementation_lower': install._get_implementation().lower(), + 'implementation': install._get_implementation(), + }) + + # pypa/distutils#113 Python 3.9 compat + self.config_vars.setdefault( + 'py_version_nodot_plat', + getattr(sys, 'windir', '').replace('.', ''), + ) - elif self.user: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + if self.user and not site.ENABLE_USER_SITE: log.warn("WARNING: The user site-packages directory is disabled.") self._fix_install_dir_for_user_site() @@ -292,27 +286,14 @@ class easy_install(Command): self.script_dir = self.install_scripts # default --record from the install command self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? 
It's not used - # elsewhere - normpath = map(normalize_path, sys.path) self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in - self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d + " (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) + self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs)) + if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "https://pypi.org/simple/" + default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/") + # ^ Private API for testing purposes only + self.index_url = self.index_url or default_index self.shadow_path = self.all_site_dirs[:] for path_item in self.install_dir, normalize_path(self.script_dir): if path_item not in self.shadow_path: @@ -338,15 +319,7 @@ class easy_install(Command): if not self.no_find_links: self.package_index.add_find_links(self.find_links) self.set_undefined_options('install_lib', ('optimize', 'optimize')) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): - raise ValueError - except ValueError as e: - raise DistutilsOptionError( - "--optimize must be 0, 1, or 2" - ) from e + self.optimize = self._validate_optimize(self.optimize) if self.editable and not self.build_directory: raise DistutilsArgError( @@ -358,11 +331,44 @@ class easy_install(Command): self.outputs = [] + @staticmethod + def _process_site_dirs(site_dirs): + if site_dirs is None: + return + + normpath = map(normalize_path, sys.path) + site_dirs = [ + os.path.expanduser(s.strip()) for s in + site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in --site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d + " (in --site-dirs) is not on sys.path" + ) + else: + yield normalize_path(d) + + @staticmethod + def _validate_optimize(value): + try: + value = int(value) + if value not in range(3): + raise ValueError + except ValueError as e: + raise DistutilsOptionError( + "--optimize must be 0, 1, or 2" + ) from e + + return value + def _fix_install_dir_for_user_site(self): """ Fix the install_dir if "--user" was used. """ - if not self.user or not site.ENABLE_USER_SITE: + if not self.user: return self.create_home_path() @@ -370,7 +376,7 @@ class easy_install(Command): msg = "User base directory is not specified" raise DistutilsPlatformError(msg) self.install_base = self.install_platbase = self.install_userbase - scheme_name = os.name.replace('posix', 'unix') + '_user' + scheme_name = f'{os.name}_user' self.select_scheme(scheme_name) def _expand_attrs(self, attrs): @@ -710,13 +716,11 @@ class easy_install(Command): return dist def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
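The user scheme name is now derived directly from ``os.name``, and the reworked ``select_scheme`` shown just below falls back to the stdlib spelling when the modernized pypa/distutils helper is unavailable. A small illustrative sketch of the two spellings:

```python
import os

scheme_name = f"{os.name}_user"                      # "posix_user" on Linux/macOS, "nt_user" on Windows
legacy_name = scheme_name.replace("posix", "unix")   # stdlib distutils knows the POSIX scheme as "unix_user"
print(scheme_name, legacy_name)
```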
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) + try: + install._select_scheme(self, name) + except AttributeError: + # stdlib distutils + install.install.select_scheme(self, name.replace('posix', 'unix')) # FIXME: 'easy_install.process_distribution' is too complex (12) def process_distribution( # noqa: C901 @@ -913,7 +917,9 @@ class easy_install(Command): ensure_directory(destination) dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): + if not ( + os.path.exists(destination) and os.path.samefile(egg_path, destination) + ): if os.path.isdir(destination) and not os.path.islink(destination): dir_util.remove_tree(destination, dry_run=self.dry_run) elif os.path.exists(destination): @@ -1303,7 +1309,7 @@ class easy_install(Command): if not self.user: return home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.items(): + for path in only_strs(self.config_vars.values()): if path.startswith(home) and not os.path.isdir(path): self.debug_print("os.makedirs('%s', 0o700)" % path) os.makedirs(path, 0o700) @@ -1325,7 +1331,7 @@ class easy_install(Command): if self.prefix: # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() + config_vars = dict(config_vars) config_vars['base'] = self.prefix scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) for attr, val in scheme.items(): @@ -1552,7 +1558,7 @@ class PthDistributions(Environment): self.sitedirs = list(map(normalize_path, sitedirs)) self.basedir = normalize_path(os.path.dirname(self.filename)) self._load() - Environment.__init__(self, [], None, None) + super().__init__([], None, None) for path in yield_lines(self.paths): list(map(self.add, find_distributions(path, True))) @@ -1625,14 +1631,14 @@ class PthDistributions(Environment): if new_path: self.paths.append(dist.location) self.dirty = True - Environment.add(self, dist) + super().add(dist) def remove(self, dist): """Remove `dist` from the distribution map""" while dist.location in self.paths: self.paths.remove(dist.location) self.dirty = True - Environment.remove(self, dist) + super().remove(dist) def make_relative(self, path): npath, last = os.path.split(normalize_path(path)) @@ -2273,6 +2279,13 @@ def current_umask(): return tmp +def only_strs(values): + """ + Exclude non-str values. Ref #3063. + """ + return filter(lambda val: isinstance(val, str), values) + + class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): """ Warning for EasyInstall deprecations, bypassing suppression. diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index f2210292..c37ab81f 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -17,18 +17,22 @@ import warnings import time import collections +from .._importlib import metadata +from .. 
import _entry_points + from setuptools import Command from setuptools.command.sdist import sdist from setuptools.command.sdist import walk_revctrl from setuptools.command.setopt import edit_config from setuptools.command import bdist_egg from pkg_resources import ( - parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) + Requirement, safe_name, parse_version, + safe_version, to_filename) import setuptools.unicode_utils as unicode_utils from setuptools.glob import glob from setuptools.extern import packaging +from setuptools.extern.jaraco.text import yield_lines from setuptools import SetuptoolsDeprecationWarning @@ -132,11 +136,21 @@ class InfoCommon: in which case the version string already contains all tags. """ return ( - version if self.vtags and version.endswith(self.vtags) + version if self.vtags and self._already_tagged(version) else version + self.vtags ) - def tags(self): + def _already_tagged(self, version: str) -> bool: + # Depending on their format, tags may change with version normalization. + # So in addition the regular tags, we have to search for the normalized ones. + return version.endswith(self.vtags) or version.endswith(self._safe_tags()) + + def _safe_tags(self) -> str: + # To implement this we can rely on `safe_version` pretending to be version 0 + # followed by tags. Then we simply discard the starting 0 (fake version number) + return safe_version(f"0{self.vtags}")[1:] + + def tags(self) -> str: version = '' if self.tag_build: version += self.tag_build @@ -205,12 +219,8 @@ class egg_info(InfoCommon, Command): try: is_version = isinstance(parsed_version, packaging.version.Version) - spec = ( - "%s==%s" if is_version else "%s===%s" - ) - list( - parse_requirements(spec % (self.egg_name, self.egg_version)) - ) + spec = "%s==%s" if is_version else "%s===%s" + Requirement(spec % (self.egg_name, self.egg_version)) except ValueError as e: raise distutils.errors.DistutilsOptionError( "Invalid distribution name or version syntax: %s-%s" % @@ -285,10 +295,9 @@ class egg_info(InfoCommon, Command): def run(self): self.mkpath(self.egg_info) os.utime(self.egg_info, None) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - ep.require(installer=installer) - writer = ep.resolve() + for ep in metadata.entry_points(group='egg_info.writers'): + self.distribution._install_dependencies(ep) + writer = ep.load() writer(self, ep.name, os.path.join(self.egg_info, ep.name)) # Get rid of native_libs.txt if it was put there by older bdist_egg @@ -719,20 +728,9 @@ def write_arg(cmd, basename, filename, force=False): def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep, str) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in sorted(ep.items()): - if not isinstance(contents, str): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(sorted(map(str, contents.values()))) - data.append('[%s]\n%s\n\n' % (section, contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) + eps = _entry_points.load(cmd.distribution.entry_points) + defn = _entry_points.render(eps) + cmd.write_or_delete_file('entry points', filename, defn, True) def get_pkg_info_revision(): diff --git a/setuptools/command/install.py b/setuptools/command/install.py index 35e54d20..55fdb124 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ 
-91,14 +91,21 @@ class install(orig.install): msg = "For best results, pass -X:Frames to enable call stack." warnings.warn(msg) return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) + + frames = inspect.getouterframes(run_frame) + for frame in frames[2:4]: + caller, = frame[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + + if caller_module == "setuptools.dist" and info.function == "run_command": + # Starting from v61.0.0 setuptools overwrites dist.run_command + continue + + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) def do_egg_install(self): diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py index edc4718b..65ede406 100644 --- a/setuptools/command/install_egg_info.py +++ b/setuptools/command/install_egg_info.py @@ -4,6 +4,7 @@ import os from setuptools import Command from setuptools import namespaces from setuptools.archive_util import unpack_archive +from .._path import ensure_directory import pkg_resources @@ -37,7 +38,7 @@ class install_egg_info(namespaces.Installer, Command): elif os.path.exists(self.target): self.execute(os.unlink, (self.target,), "Removing " + self.target) if not self.dry_run: - pkg_resources.ensure_directory(self.target) + ensure_directory(self.target) self.execute( self.copytree, (), "Copying %s to %s" % (self.source, self.target) ) diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py index 9cd8eb06..aeb0e424 100644 --- a/setuptools/command/install_scripts.py +++ b/setuptools/command/install_scripts.py @@ -4,7 +4,8 @@ from distutils.errors import DistutilsModuleError import os import sys -from pkg_resources import Distribution, PathMetadata, ensure_directory +from pkg_resources import Distribution, PathMetadata +from .._path import ensure_directory class install_scripts(orig.install_scripts): diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 0285b690..0ffeacf3 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -7,14 +7,14 @@ import contextlib from .py36compat import sdist_add_defaults -import pkg_resources +from .._importlib import metadata _default_revctrl = list def walk_revctrl(dirname=''): """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for ep in metadata.entry_points(group='setuptools.file_finders'): for item in ep.load()(dirname): yield item diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 4a389e4d..652f3e4a 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -16,10 +16,11 @@ from pkg_resources import ( evaluate_marker, add_activation_listener, require, - EntryPoint, ) +from .._importlib import metadata from setuptools import Command from setuptools.extern.more_itertools import unique_everseen +from setuptools.extern.jaraco.functools import pass_none class ScanningLoader(TestLoader): @@ -241,12 +242,10 @@ class test(Command): return ['unittest'] + self.test_args @staticmethod + @pass_none def _resolve_as_ep(val): """ Load the indicated attribute value, called, as a as if it were specified as an entry point. 
""" - if val is None: - return - parsed = EntryPoint.parse("x=" + val) - return parsed.resolve()() + return metadata.EntryPoint(value=val, name=None, group=None).load()() diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 845bff44..a5480005 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -17,8 +17,11 @@ import itertools import functools import http.client import urllib.parse +import warnings + +from .._importlib import metadata +from .. import SetuptoolsDeprecationWarning -from pkg_resources import iter_entry_points from .upload import upload @@ -43,9 +46,10 @@ class upload_docs(upload): boolean_options = upload.boolean_options def has_sphinx(self): - if self.upload_dir is None: - for ep in iter_entry_points('distutils.commands', 'build_sphinx'): - return True + return bool( + self.upload_dir is None + and metadata.entry_points(group='distutils.commands', name='build_sphinx') + ) sub_commands = [('build_sphinx', has_sphinx)] @@ -87,6 +91,12 @@ class upload_docs(upload): zip_file.close() def run(self): + warnings.warn( + "upload_docs is deprecated and will be removed in a future " + "version. Use tools like httpie or curl instead.", + SetuptoolsDeprecationWarning, + ) + # Run sub commands for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py new file mode 100644 index 00000000..35458d8e --- /dev/null +++ b/setuptools/config/__init__.py @@ -0,0 +1,35 @@ +"""For backward compatibility, expose main functions from +``setuptools.config.setupcfg`` +""" +import warnings +from functools import wraps +from textwrap import dedent +from typing import Callable, TypeVar, cast + +from .._deprecation_warning import SetuptoolsDeprecationWarning +from . import setupcfg + +Fn = TypeVar("Fn", bound=Callable) + +__all__ = ('parse_configuration', 'read_configuration') + + +def _deprecation_notice(fn: Fn) -> Fn: + @wraps(fn) + def _wrapper(*args, **kwargs): + msg = f"""\ + As setuptools moves its configuration towards `pyproject.toml`, + `{__name__}.{fn.__name__}` became deprecated. + + For the time being, you can use the `{setupcfg.__name__}` module + to access a backward compatible API, but this module is provisional + and might be removed in the future. + """ + warnings.warn(dedent(msg), SetuptoolsDeprecationWarning) + return fn(*args, **kwargs) + + return cast(Fn, _wrapper) + + +read_configuration = _deprecation_notice(setupcfg.read_configuration) +parse_configuration = _deprecation_notice(setupcfg.parse_configuration) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py new file mode 100644 index 00000000..a580b63f --- /dev/null +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -0,0 +1,360 @@ +"""Translation layer between pyproject config and setuptools distribution and +metadata objects. + +The distribution and metadata objects are modeled after (an old version of) +core metadata, therefore configs in the format specified for ``pyproject.toml`` +need to be processed before being applied. 
+""" +import logging +import os +import warnings +from collections.abc import Mapping +from email.headerregistry import Address +from functools import partial, reduce +from itertools import chain +from types import MappingProxyType +from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, + Type, Union) + +if TYPE_CHECKING: + from setuptools._importlib import metadata # noqa + from setuptools.dist import Distribution # noqa + +EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like +_Path = Union[os.PathLike, str] +_DictOrStr = Union[dict, str] +_CorrespFn = Callable[["Distribution", Any, _Path], None] +_Correspondence = Union[str, _CorrespFn] + +_logger = logging.getLogger(__name__) + + +def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution": + """Apply configuration dict read with :func:`read_configuration`""" + + if not config: + return dist # short-circuit unrelated pyproject.toml file + + root_dir = os.path.dirname(filename) or "." + + _apply_project_table(dist, config, root_dir) + _apply_tool_table(dist, config, filename) + + current_directory = os.getcwd() + os.chdir(root_dir) + try: + dist._finalize_requires() + dist._finalize_license_files() + finally: + os.chdir(current_directory) + + return dist + + +def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path): + project_table = config.get("project", {}).copy() + if not project_table: + return # short-circuit + + _handle_missing_dynamic(dist, project_table) + _unify_entry_points(project_table) + + for field, value in project_table.items(): + norm_key = json_compatible_key(field) + corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key) + if callable(corresp): + corresp(dist, value, root_dir) + else: + _set_config(dist, corresp, value) + + +def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): + tool_table = config.get("tool", {}).get("setuptools", {}) + if not tool_table: + return # short-circuit + + for field, value in tool_table.items(): + norm_key = json_compatible_key(field) + norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key) + _set_config(dist, norm_key, value) + + _copy_command_options(config, dist, filename) + + +def _handle_missing_dynamic(dist: "Distribution", project_table: dict): + """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``""" + # TODO: Set fields back to `None` once the feature stabilizes + dynamic = set(project_table.get("dynamic", [])) + for field, getter in _PREVIOUSLY_DEFINED.items(): + if not (field in project_table or field in dynamic): + value = getter(dist) + if value: + msg = _WouldIgnoreField.message(field, value) + warnings.warn(msg, _WouldIgnoreField) + + +def json_compatible_key(key: str) -> str: + """As defined in :pep:`566#json-compatible-metadata`""" + return key.lower().replace("-", "_") + + +def _set_config(dist: "Distribution", field: str, value: Any): + setter = getattr(dist.metadata, f"set_{field}", None) + if setter: + setter(value) + elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES: + setattr(dist.metadata, field, value) + else: + setattr(dist, field, value) + + +_CONTENT_TYPES = { + ".md": "text/markdown", + ".rst": "text/x-rst", + ".txt": "text/plain", +} + + +def _guess_content_type(file: str) -> Optional[str]: + _, ext = os.path.splitext(file.lower()) + if not ext: + return None + + if ext in _CONTENT_TYPES: + return _CONTENT_TYPES[ext] + + valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items()) + msg = f"only the following 
file extensions are recognized: {valid}." + raise ValueError(f"Undefined content type for {file}, {msg}") + + +def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path): + from setuptools.config import expand + + if isinstance(val, str): + text = expand.read_files(val, root_dir) + ctype = _guess_content_type(val) + else: + text = val.get("text") or expand.read_files(val.get("file", []), root_dir) + ctype = val["content-type"] + + _set_config(dist, "long_description", text) + if ctype: + _set_config(dist, "long_description_content_type", ctype) + + +def _license(dist: "Distribution", val: dict, root_dir: _Path): + from setuptools.config import expand + + if "file" in val: + _set_config(dist, "license", expand.read_files([val["file"]], root_dir)) + else: + _set_config(dist, "license", val["text"]) + + +def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str): + field = [] + email_field = [] + for person in val: + if "name" not in person: + email_field.append(person["email"]) + elif "email" not in person: + field.append(person["name"]) + else: + addr = Address(display_name=person["name"], addr_spec=person["email"]) + email_field.append(str(addr)) + + if field: + _set_config(dist, kind, ", ".join(field)) + if email_field: + _set_config(dist, f"{kind}_email", ", ".join(email_field)) + + +def _project_urls(dist: "Distribution", val: dict, _root_dir): + _set_config(dist, "project_urls", val) + + +def _python_requires(dist: "Distribution", val: dict, _root_dir): + from setuptools.extern.packaging.specifiers import SpecifierSet + + _set_config(dist, "python_requires", SpecifierSet(val)) + + +def _dependencies(dist: "Distribution", val: list, _root_dir): + existing = getattr(dist, "install_requires", []) + _set_config(dist, "install_requires", existing + val) + + +def _optional_dependencies(dist: "Distribution", val: dict, _root_dir): + existing = getattr(dist, "extras_require", {}) + _set_config(dist, "extras_require", {**existing, **val}) + + +def _unify_entry_points(project_table: dict): + project = project_table + entry_points = project.pop("entry-points", project.pop("entry_points", {})) + renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"} + for key, value in list(project.items()): # eager to allow modifications + norm_key = json_compatible_key(key) + if norm_key in renaming and value: + entry_points[renaming[norm_key]] = project.pop(key) + + if entry_points: + project["entry-points"] = { + name: [f"{k} = {v}" for k, v in group.items()] + for name, group in entry_points.items() + } + + +def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path): + tool_table = pyproject.get("tool", {}) + cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {}) + valid_options = _valid_command_options(cmdclass) + + cmd_opts = dist.command_options + for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items(): + cmd = json_compatible_key(cmd) + valid = valid_options.get(cmd, set()) + cmd_opts.setdefault(cmd, {}) + for key, value in config.items(): + key = json_compatible_key(key) + cmd_opts[cmd][key] = (str(filename), value) + if key not in valid: + # To avoid removing options that are specified dynamically we + # just log a warn... 
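As an illustration of the entry-point unification performed above, a hedged, simplified sketch (``unify_scripts`` is a stand-in for ``_unify_entry_points`` and handles only the ``[project.scripts]`` table; the sample data is hypothetical):

```python
def unify_scripts(project: dict) -> dict:
    # Simplified mirror of _unify_entry_points for the [project.scripts] table:
    # fold it into the entry-points mapping as "name = module:attr" strings.
    scripts = project.pop("scripts", {})
    if scripts:
        entry_points = project.setdefault("entry-points", {})
        entry_points["console_scripts"] = [f"{k} = {v}" for k, v in scripts.items()]
    return project


demo = {"name": "demo", "scripts": {"demo-cli": "demo.cli:main"}}
print(unify_scripts(demo)["entry-points"])
# {'console_scripts': ['demo-cli = demo.cli:main']}
```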
+ _logger.warning(f"Command option {cmd}.{key} is not defined") + + +def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]: + from .._importlib import metadata + from setuptools.dist import Distribution + + valid_options = {"global": _normalise_cmd_options(Distribution.global_options)} + + unloaded_entry_points = metadata.entry_points(group='distutils.commands') + loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points) + entry_points = (ep for ep in loaded_entry_points if ep) + for cmd, cmd_class in chain(entry_points, cmdclass.items()): + opts = valid_options.get(cmd, set()) + opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", [])) + valid_options[cmd] = opts + + return valid_options + + +def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]: + # Ignore all the errors + try: + return (ep.name, ep.load()) + except Exception as ex: + msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}" + _logger.warning(f"{msg}: {ex}") + return None + + +def _normalise_cmd_option_key(name: str) -> str: + return json_compatible_key(name).strip("_=") + + +def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]: + return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc} + + +def _attrgetter(attr): + """ + Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _attrgetter("a")(obj) + 42 + >>> _attrgetter("b.c")(obj) + 13 + >>> _attrgetter("d")(obj) is None + True + """ + return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split(".")) + + +def _some_attrgetter(*items): + """ + Return the first "truth-y" attribute or None + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _some_attrgetter("d", "a", "b.c")(obj) + 42 + >>> _some_attrgetter("d", "e", "b.c", "a")(obj) + 13 + >>> _some_attrgetter("d", "e", "f")(obj) is None + True + """ + def _acessor(obj): + values = (_attrgetter(i)(obj) for i in items) + return next((i for i in values if i is not None), None) + return _acessor + + +PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = { + "readme": _long_description, + "license": _license, + "authors": partial(_people, kind="author"), + "maintainers": partial(_people, kind="maintainer"), + "urls": _project_urls, + "dependencies": _dependencies, + "optional_dependencies": _optional_dependencies, + "requires_python": _python_requires, +} + +TOOL_TABLE_RENAMES = {"script_files": "scripts"} + +SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls", + "provides_extras", "license_file", "license_files"} + +_PREVIOUSLY_DEFINED = { + "name": _attrgetter("metadata.name"), + "version": _attrgetter("metadata.version"), + "description": _attrgetter("metadata.description"), + "readme": _attrgetter("metadata.long_description"), + "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"), + "license": _attrgetter("metadata.license"), + "authors": _some_attrgetter("metadata.author", "metadata.author_email"), + "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"), + "keywords": _attrgetter("metadata.keywords"), + "classifiers": _attrgetter("metadata.classifiers"), + "urls": _attrgetter("metadata.project_urls"), + "entry-points": _attrgetter("entry_points"), + "dependencies": _some_attrgetter("_orig_install_requires", 
"install_requires"), + "optional-dependencies": _some_attrgetter("_orig_extras_require", "extras_require"), +} + + +class _WouldIgnoreField(UserWarning): + """Inform users that ``pyproject.toml`` would overwrite previously defined metadata: + !!\n\n + ########################################################################## + # configuration would be ignored/result in error due to `pyproject.toml` # + ########################################################################## + + The following seems to be defined outside of `pyproject.toml`: + + `{field} = {value!r}` + + According to the spec (see the link bellow), however, setuptools CANNOT + consider this value unless {field!r} is listed as `dynamic`. + + https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + + For the time being, `setuptools` will still consider the given value (as a + **transitional** measure), but please note that future releases of setuptools will + follow strictly the standard. + + To prevent this warning, you can list {field!r} under `dynamic` or alternatively + remove the `[project]` table from your file and rely entirely on other means of + configuration. + \n\n!! + """ + + @classmethod + def message(cls, field, value): + from inspect import cleandoc + msg = "\n".join(cls.__doc__.splitlines()[1:]) + return cleandoc(msg.format(field=field, value=value)) diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE new file mode 100644 index 00000000..286d2908 --- /dev/null +++ b/setuptools/config/_validate_pyproject/NOTICE @@ -0,0 +1,439 @@ +The code contained in this directory was automatically generated using the +following command: + + python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose + +Please avoid changing it manually. + + +You can report issues or suggest changes directly to `validate-pyproject` +(or to the relevant plugin repository) + +- https://github.com/abravalheri/validate-pyproject/issues + + +*** + +The following files include code from opensource projects +(either as direct copies or modified versions): + +- `fastjsonschema_exceptions.py`: + - project: `fastjsonschema` - licensed under BSD-3-Clause + (https://github.com/horejsek/python-fastjsonschema) +- `extra_validations.py` and `format.py`, `error_reporting.py`: + - project: `validate-pyproject` - licensed under MPL-2.0 + (https://github.com/abravalheri/validate-pyproject) + + +Additionally the following files are automatically generated by tools provided +by the same projects: + +- `__init__.py` +- `fastjsonschema_validations.py` + +The relevant copyright notes and licenses are included below. + + +*** + +`fastjsonschema` +================ + +Copyright (c) 2018, Michal Horejsek +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + + Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +*** + +`validate-pyproject` +==================== + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. 
For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. 
Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. 
However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. 
Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + https://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py new file mode 100644 index 00000000..dbe6cb4c --- /dev/null +++ b/setuptools/config/_validate_pyproject/__init__.py @@ -0,0 +1,34 @@ +from functools import reduce +from typing import Any, Callable, Dict + +from . import formats +from .error_reporting import detailed_errors, ValidationError +from .extra_validations import EXTRA_VALIDATIONS +from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException +from .fastjsonschema_validations import validate as _validate + +__all__ = [ + "validate", + "FORMAT_FUNCTIONS", + "EXTRA_VALIDATIONS", + "ValidationError", + "JsonSchemaException", + "JsonSchemaValueException", +] + + +FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = { + fn.__name__.replace("_", "-"): fn + for fn in formats.__dict__.values() + if callable(fn) and not fn.__name__.startswith("_") +} + + +def validate(data: Any) -> bool: + """Validate the given ``data`` object using JSON Schema + This function raises ``ValidationError`` if ``data`` is invalid. 
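A hedged usage sketch of this generated validator (the import path assumes the package layout added by this patch; the sample data is hypothetical):

```python
from setuptools.config._validate_pyproject import JsonSchemaValueException, validate

validate({"project": {"name": "demo", "version": "1.0"}})  # returns True for valid data

# The EXTRA_VALIDATIONS step also enforces PEP 621 rules such as "a field may
# not be set statically while also being listed under `dynamic`":
bad = {"project": {"name": "demo", "version": "1.0", "dynamic": ["version"]}}
try:
    validate(bad)
except JsonSchemaValueException as ex:  # base class of the errors raised here
    print(ex)  # explains that `project.version` cannot also be listed as dynamic
```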
+ """ + with detailed_errors(): + _validate(data, custom_formats=FORMAT_FUNCTIONS) + reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data) + return True diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py new file mode 100644 index 00000000..f78e4838 --- /dev/null +++ b/setuptools/config/_validate_pyproject/error_reporting.py @@ -0,0 +1,318 @@ +import io +import json +import logging +import os +import re +from contextlib import contextmanager +from textwrap import indent, wrap +from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast + +from .fastjsonschema_exceptions import JsonSchemaValueException + +_logger = logging.getLogger(__name__) + +_MESSAGE_REPLACEMENTS = { + "must be named by propertyName definition": "keys must be named by", + "one of contains definition": "at least one item that matches", + " same as const definition:": "", + "only specified items": "only items matching the definition", +} + +_SKIP_DETAILS = ( + "must not be empty", + "is always invalid", + "must not be there", +) + +_NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"} + +_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)") +_IDENTIFIER = re.compile(r"^[\w_]+$", re.I) + +_TOML_JARGON = { + "object": "table", + "property": "key", + "properties": "keys", + "property names": "keys", +} + + +class ValidationError(JsonSchemaValueException): + """Report violations of a given JSON schema. + + This class extends :exc:`~fastjsonschema.JsonSchemaValueException` + by adding the following properties: + + - ``summary``: an improved version of the ``JsonSchemaValueException`` error message + with only the necessary information) + + - ``details``: more contextual information about the error like the failing schema + itself and the value that violates the schema. + + Depending on the level of the verbosity of the ``logging`` configuration + the exception message will be only ``summary`` (default) or a combination of + ``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`). 
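To illustrate the two properties described above, a small hedged example (the invalid ``readme`` value is hypothetical):

```python
import logging

from setuptools.config._validate_pyproject import ValidationError, validate

logging.basicConfig(level=logging.DEBUG)  # at DEBUG, str(error) combines summary and details

try:
    validate({"project": {"name": "demo", "version": "1.0", "readme": 42}})
except ValidationError as ex:
    print(ex.summary)  # short human-readable message
    print(ex.details)  # offending rule, given value and schema definition
```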
+ """ + + summary = "" + details = "" + _original_message = "" + + @classmethod + def _from_jsonschema(cls, ex: JsonSchemaValueException): + formatter = _ErrorFormatting(ex) + obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule) + debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower() + if debug_code != "false": # pragma: no cover + obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__ + obj._original_message = ex.message + obj.summary = formatter.summary + obj.details = formatter.details + return obj + + +@contextmanager +def detailed_errors(): + try: + yield + except JsonSchemaValueException as ex: + raise ValidationError._from_jsonschema(ex) from None + + +class _ErrorFormatting: + def __init__(self, ex: JsonSchemaValueException): + self.ex = ex + self.name = f"`{self._simplify_name(ex.name)}`" + self._original_message = self.ex.message.replace(ex.name, self.name) + self._summary = "" + self._details = "" + + def __str__(self) -> str: + if _logger.getEffectiveLevel() <= logging.DEBUG and self.details: + return f"{self.summary}\n\n{self.details}" + + return self.summary + + @property + def summary(self) -> str: + if not self._summary: + self._summary = self._expand_summary() + + return self._summary + + @property + def details(self) -> str: + if not self._details: + self._details = self._expand_details() + + return self._details + + def _simplify_name(self, name): + x = len("data.") + return name[x:] if name.startswith("data.") else name + + def _expand_summary(self): + msg = self._original_message + + for bad, repl in _MESSAGE_REPLACEMENTS.items(): + msg = msg.replace(bad, repl) + + if any(substring in msg for substring in _SKIP_DETAILS): + return msg + + schema = self.ex.rule_definition + if self.ex.rule in _NEED_DETAILS and schema: + summary = _SummaryWriter(_TOML_JARGON) + return f"{msg}:\n\n{indent(summary(schema), ' ')}" + + return msg + + def _expand_details(self) -> str: + optional = [] + desc_lines = self.ex.definition.pop("$$description", []) + desc = self.ex.definition.pop("description", None) or " ".join(desc_lines) + if desc: + description = "\n".join( + wrap( + desc, + width=80, + initial_indent=" ", + subsequent_indent=" ", + break_long_words=False, + ) + ) + optional.append(f"DESCRIPTION:\n{description}") + schema = json.dumps(self.ex.definition, indent=4) + value = json.dumps(self.ex.value, indent=4) + defaults = [ + f"GIVEN VALUE:\n{indent(value, ' ')}", + f"OFFENDING RULE: {self.ex.rule!r}", + f"DEFINITION:\n{indent(schema, ' ')}", + ] + return "\n\n".join(optional + defaults) + + +class _SummaryWriter: + _IGNORE = {"description", "default", "title", "examples"} + + def __init__(self, jargon: Optional[Dict[str, str]] = None): + self.jargon: Dict[str, str] = jargon or {} + # Clarify confusing terms + self._terms = { + "anyOf": "at least one of the following", + "oneOf": "exactly one of the following", + "allOf": "all of the following", + "not": "(*NOT* the following)", + "prefixItems": f"{self._jargon('items')} (in order)", + "items": "items", + "contains": "contains at least one of", + "propertyNames": ( + f"non-predefined acceptable {self._jargon('property names')}" + ), + "patternProperties": f"{self._jargon('properties')} named via pattern", + "const": "predefined value", + "enum": "one of", + } + # Attributes that indicate that the definition is easy and can be done + # inline (e.g. 
string and number) + self._guess_inline_defs = [ + "enum", + "const", + "maxLength", + "minLength", + "pattern", + "format", + "minimum", + "maximum", + "exclusiveMinimum", + "exclusiveMaximum", + "multipleOf", + ] + + def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]: + if isinstance(term, list): + return [self.jargon.get(t, t) for t in term] + return self.jargon.get(term, term) + + def __call__( + self, + schema: Union[dict, List[dict]], + prefix: str = "", + *, + _path: Sequence[str] = (), + ) -> str: + if isinstance(schema, list): + return self._handle_list(schema, prefix, _path) + + filtered = self._filter_unecessary(schema, _path) + simple = self._handle_simple_dict(filtered, _path) + if simple: + return f"{prefix}{simple}" + + child_prefix = self._child_prefix(prefix, " ") + item_prefix = self._child_prefix(prefix, "- ") + indent = len(prefix) * " " + with io.StringIO() as buffer: + for i, (key, value) in enumerate(filtered.items()): + child_path = [*_path, key] + line_prefix = prefix if i == 0 else indent + buffer.write(f"{line_prefix}{self._label(child_path)}:") + # ^ just the first item should receive the complete prefix + if isinstance(value, dict): + filtered = self._filter_unecessary(value, child_path) + simple = self._handle_simple_dict(filtered, child_path) + buffer.write( + f" {simple}" + if simple + else f"\n{self(value, child_prefix, _path=child_path)}" + ) + elif isinstance(value, list) and ( + key != "type" or self._is_property(child_path) + ): + children = self._handle_list(value, item_prefix, child_path) + sep = " " if children.startswith("[") else "\n" + buffer.write(f"{sep}{children}") + else: + buffer.write(f" {self._value(value, child_path)}\n") + return buffer.getvalue() + + def _is_unecessary(self, path: Sequence[str]) -> bool: + if self._is_property(path) or not path: # empty path => instruction @ root + return False + key = path[-1] + return any(key.startswith(k) for k in "$_") or key in self._IGNORE + + def _filter_unecessary(self, schema: dict, path: Sequence[str]): + return { + key: value + for key, value in schema.items() + if not self._is_unecessary([*path, key]) + } + + def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]: + inline = any(p in value for p in self._guess_inline_defs) + simple = not any(isinstance(v, (list, dict)) for v in value.values()) + if inline or simple: + return f"{{{', '.join(self._inline_attrs(value, path))}}}\n" + return None + + def _handle_list( + self, schemas: list, prefix: str = "", path: Sequence[str] = () + ) -> str: + if self._is_unecessary(path): + return "" + + repr_ = repr(schemas) + if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60: + return f"{repr_}\n" + + item_prefix = self._child_prefix(prefix, "- ") + return "".join( + self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas) + ) + + def _is_property(self, path: Sequence[str]): + """Check if the given path can correspond to an arbitrarily named property""" + counter = 0 + for key in path[-2::-1]: + if key not in {"properties", "patternProperties"}: + break + counter += 1 + + # If the counter if even, the path correspond to a JSON Schema keyword + # otherwise it can be any arbitrary string naming a property + return counter % 2 == 1 + + def _label(self, path: Sequence[str]) -> str: + *parents, key = path + if not self._is_property(path): + norm_key = _separate_terms(key) + return self._terms.get(key) or " ".join(self._jargon(norm_key)) + + if parents[-1] == 
"patternProperties": + return f"(regex {key!r})" + return repr(key) # property name + + def _value(self, value: Any, path: Sequence[str]) -> str: + if path[-1] == "type" and not self._is_property(path): + type_ = self._jargon(value) + return ( + f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_) + ) + return repr(value) + + def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]: + for key, value in schema.items(): + child_path = [*path, key] + yield f"{self._label(child_path)}: {self._value(value, child_path)}" + + def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str: + return len(parent_prefix) * " " + child_prefix + + +def _separate_terms(word: str) -> List[str]: + """ + >>> _separate_terms("FooBar-foo") + ['foo', 'bar', 'foo'] + """ + return [w.lower() for w in _CAMEL_CASE_SPLITTER.split(word) if w] diff --git a/setuptools/config/_validate_pyproject/extra_validations.py b/setuptools/config/_validate_pyproject/extra_validations.py new file mode 100644 index 00000000..48c4e257 --- /dev/null +++ b/setuptools/config/_validate_pyproject/extra_validations.py @@ -0,0 +1,36 @@ +"""The purpose of this module is implement PEP 621 validations that are +difficult to express as a JSON Schema (or that are not supported by the current +JSON Schema library). +""" + +from typing import Mapping, TypeVar + +from .fastjsonschema_exceptions import JsonSchemaValueException + +T = TypeVar("T", bound=Mapping) + + +class RedefiningStaticFieldAsDynamic(JsonSchemaValueException): + """According to PEP 621: + + Build back-ends MUST raise an error if the metadata specifies a field + statically as well as being listed in dynamic. + """ + + +def validate_project_dynamic(pyproject: T) -> T: + project_table = pyproject.get("project", {}) + dynamic = project_table.get("dynamic", []) + + for field in dynamic: + if field in project_table: + msg = f"You cannot provide a value for `project.{field}` and " + msg += "list it under `project.dynamic` at the same time" + name = f"data.project.{field}" + value = {field: project_table[field], "...": " # ...", "dynamic": dynamic} + raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621") + + return pyproject + + +EXTRA_VALIDATIONS = (validate_project_dynamic,) diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py new file mode 100644 index 00000000..d2dddd6a --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py @@ -0,0 +1,51 @@ +import re + + +SPLIT_RE = re.compile(r'[\.\[\]]+') + + +class JsonSchemaException(ValueError): + """ + Base exception of ``fastjsonschema`` library. + """ + + +class JsonSchemaValueException(JsonSchemaException): + """ + Exception raised by validation function. Available properties: + + * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``), + * invalid ``value`` (e.g. ``60``), + * ``name`` of a path in the data structure (e.g. ``data.property[index]``), + * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``), + * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``), + * ``rule`` which the ``value`` is breaking (e.g. ``maximum``) + * and ``rule_definition`` (e.g. ``42``). + + .. versionchanged:: 2.14.0 + Added all extra properties. 
+ """ + + def __init__(self, message, value=None, name=None, definition=None, rule=None): + super().__init__(message) + self.message = message + self.value = value + self.name = name + self.definition = definition + self.rule = rule + + @property + def path(self): + return [item for item in SPLIT_RE.split(self.name) if item != ''] + + @property + def rule_definition(self): + if not self.rule or not self.definition: + return None + return self.definition.get(self.rule) + + +class JsonSchemaDefinitionException(JsonSchemaException): + """ + Exception raised by generator of validation function. + """ diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py new file mode 100644 index 00000000..3ad1edd0 --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py @@ -0,0 +1,1004 @@ +# noqa +# type: ignore +# flake8: noqa +# pylint: skip-file +# mypy: ignore-errors +# yapf: disable +# pylama:skip=1 + + +# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** + + +VERSION = "2.15.3" +import re +from .fastjsonschema_exceptions import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^.*$': re.compile('^.*$'), + '.+': re.compile('.+'), + '^.+$': re.compile('^.+$'), + 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The 
``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "build-system" in data_keys: + data_keys.remove("build-system") + data__buildsystem = data["build-system"] + if not isinstance(data__buildsystem, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type') + data__buildsystem_is_dict = isinstance(data__buildsystem, dict) + if data__buildsystem_is_dict: + data__buildsystem_len = len(data__buildsystem) + if not all(prop in data__buildsystem for prop in ['requires']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required') + data__buildsystem_keys = set(data__buildsystem.keys()) + if "requires" in data__buildsystem_keys: + data__buildsystem_keys.remove("requires") + data__buildsystem__requires = data__buildsystem["requires"] + if not isinstance(data__buildsystem__requires, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type') + data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple)) + if data__buildsystem__requires_is_list: + data__buildsystem__requires_len = len(data__buildsystem__requires) + for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires): + if not isinstance(data__buildsystem__requires_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "build-backend" in data__buildsystem_keys: + data__buildsystem_keys.remove("build-backend") + data__buildsystem__buildbackend = data__buildsystem["build-backend"] + if not isinstance(data__buildsystem__buildbackend, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type') + if isinstance(data__buildsystem__buildbackend, str): + if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format') + if "backend-path" in data__buildsystem_keys: + data__buildsystem_keys.remove("backend-path") + data__buildsystem__backendpath = data__buildsystem["backend-path"] + if not isinstance(data__buildsystem__backendpath, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type') + data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple)) + if data__buildsystem__backendpath_is_list: + data__buildsystem__backendpath_len = len(data__buildsystem__backendpath) + for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath): + if not isinstance(data__buildsystem__backendpath_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be 
a path (TODO: enforce it with format?)'}, rule='type') + if data__buildsystem_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties') + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project") + if "tool" in data_keys: + data_keys.remove("tool") + data__tool = data["tool"] + if not isinstance(data__tool, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 
'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 
'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type') + data__tool_is_dict = isinstance(data__tool, dict) + if data__tool_is_dict: + data__tool_keys = set(data__tool.keys()) + if "distutils" in data__tool_keys: + data__tool_keys.remove("distutils") + data__tool__distutils = data__tool["distutils"] + validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils") + if "setuptools" in data__tool_keys: + data__tool_keys.remove("setuptools") + data__tool__setuptools = data__tool["setuptools"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools") + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The 
``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be 
safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': 
['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "platforms" in data_keys: + data_keys.remove("platforms") + data__platforms = data["platforms"] + if not isinstance(data__platforms, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__platforms_is_list = isinstance(data__platforms, (list, tuple)) + if data__platforms_is_list: + data__platforms_len = len(data__platforms) + for data__platforms_x, data__platforms_item in enumerate(data__platforms): + if not isinstance(data__platforms_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "provides" in data_keys: + data_keys.remove("provides") + data__provides = data["provides"] + if not isinstance(data__provides, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__provides_is_list = isinstance(data__provides, (list, tuple)) + if data__provides_is_list: + data__provides_len = len(data__provides) + 
for data__provides_x, data__provides_item in enumerate(data__provides): + if not isinstance(data__provides_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__provides_item, str): + if not custom_formats["pep508-identifier"](data__provides_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "obsoletes" in data_keys: + data_keys.remove("obsoletes") + data__obsoletes = data["obsoletes"] + if not isinstance(data__obsoletes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple)) + if data__obsoletes_is_list: + data__obsoletes_len = len(data__obsoletes) + for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes): + if not isinstance(data__obsoletes_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__obsoletes_item, str): + if not custom_formats["pep508-identifier"](data__obsoletes_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "zip-safe" in data_keys: + data_keys.remove("zip-safe") + data__zipsafe = data["zip-safe"] + if not isinstance(data__zipsafe, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type') + if "script-files" in data_keys: + data_keys.remove("script-files") + data__scriptfiles = data["script-files"] + if not isinstance(data__scriptfiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type') + data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, 
tuple)) + if data__scriptfiles_is_list: + data__scriptfiles_len = len(data__scriptfiles) + for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles): + if not isinstance(data__scriptfiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "eager-resources" in data_keys: + data_keys.remove("eager-resources") + data__eagerresources = data["eager-resources"] + if not isinstance(data__eagerresources, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple)) + if data__eagerresources_is_list: + data__eagerresources_len = len(data__eagerresources) + for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources): + if not isinstance(data__eagerresources_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "packages" in data_keys: + data_keys.remove("packages") + data__packages = data["packages"] + data__packages_one_of_count1 = 0 + if data__packages_one_of_count1 < 2: + try: + if not isinstance(data__packages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, rule='type') + data__packages_is_list = isinstance(data__packages, (list, tuple)) + if data__packages_is_list: + data__packages_len = len(data__packages) + for data__packages_x, data__packages_item in enumerate(data__packages): + if not isinstance(data__packages_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be string", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__packages_item, str): + if not custom_formats["python-module-name"](data__packages_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be python-module-name", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 < 2: + try: + 
validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages") + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf') + if "package-dir" in data_keys: + data_keys.remove("package-dir") + data__packagedir = data["package-dir"] + if not isinstance(data__packagedir, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type') + data__packagedir_is_dict = isinstance(data__packagedir, dict) + if data__packagedir_is_dict: + data__packagedir_keys = set(data__packagedir.keys()) + for data__packagedir_key, data__packagedir_val in data__packagedir.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedir_key): + if data__packagedir_key in data__packagedir_keys: + data__packagedir_keys.remove(data__packagedir_key) + if not isinstance(data__packagedir_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedir_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties') + data__packagedir_len = len(data__packagedir) + if data__packagedir_len != 0: + data__packagedir_property_names = True + for data__packagedir_key in data__packagedir: + try: + data__packagedir_key_one_of_count2 = 0 + if data__packagedir_key_one_of_count2 < 2: + try: + if isinstance(data__packagedir_key, str): + if not custom_formats["python-module-name"](data__packagedir_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be python-module-name", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'format': 'python-module-name'}, rule='format') + data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 < 2: + try: + if data__packagedir_key != "": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const') + 
data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedir_property_names = False + if not data__packagedir_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames') + if "package-data" in data_keys: + data_keys.remove("package-data") + data__packagedata = data["package-data"] + if not isinstance(data__packagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__packagedata_is_dict = isinstance(data__packagedata, dict) + if data__packagedata_is_dict: + data__packagedata_keys = set(data__packagedata.keys()) + for data__packagedata_key, data__packagedata_val in data__packagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedata_key): + if data__packagedata_key in data__packagedata_keys: + data__packagedata_keys.remove(data__packagedata_key) + if not isinstance(data__packagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple)) + if data__packagedata_val_is_list: + data__packagedata_val_len = len(data__packagedata_val) + for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val): + if not isinstance(data__packagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + 
".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__packagedata_len = len(data__packagedata) + if data__packagedata_len != 0: + data__packagedata_property_names = True + for data__packagedata_key in data__packagedata: + try: + data__packagedata_key_one_of_count3 = 0 + if data__packagedata_key_one_of_count3 < 2: + try: + if isinstance(data__packagedata_key, str): + if not custom_formats["python-module-name"](data__packagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 < 2: + try: + if data__packagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedata_property_names = False + if not data__packagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "include-package-data" in data_keys: + data_keys.remove("include-package-data") + data__includepackagedata = data["include-package-data"] + if not isinstance(data__includepackagedata, (bool)): 
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, rule='type') + if "exclude-package-data" in data_keys: + data_keys.remove("exclude-package-data") + data__excludepackagedata = data["exclude-package-data"] + if not isinstance(data__excludepackagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict) + if data__excludepackagedata_is_dict: + data__excludepackagedata_keys = set(data__excludepackagedata.keys()) + for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key): + if data__excludepackagedata_key in data__excludepackagedata_keys: + data__excludepackagedata_keys.remove(data__excludepackagedata_key) + if not isinstance(data__excludepackagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple)) + if data__excludepackagedata_val_is_list: + data__excludepackagedata_val_len = len(data__excludepackagedata_val) + for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val): + if not isinstance(data__excludepackagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__excludepackagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be 
excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__excludepackagedata_len = len(data__excludepackagedata) + if data__excludepackagedata_len != 0: + data__excludepackagedata_property_names = True + for data__excludepackagedata_key in data__excludepackagedata: + try: + data__excludepackagedata_key_one_of_count4 = 0 + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if isinstance(data__excludepackagedata_key, str): + if not custom_formats["python-module-name"](data__excludepackagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if data__excludepackagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__excludepackagedata_property_names = False + if not data__excludepackagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "namespace-packages" in data_keys: + data_keys.remove("namespace-packages") + data__namespacepackages = data["namespace-packages"] + if not isinstance(data__namespacepackages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type') 
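For readers skimming this generated validator, the checks above reduce to a few simple shape rules for the ``[tool.setuptools]`` data-file options: ``package-data`` and ``exclude-package-data`` are mappings whose keys are dotted module names (or the literal ``*``) and whose values are lists of glob strings, while ``include-package-data`` is a plain boolean. The following is a minimal sketch of shapes these checks accept or reject, using plain dicts in place of parsed ``pyproject.toml`` content; the helper below is an illustration only and is not part of the generated module.

# Illustration only (not part of the generated module): example data shapes for the
# ``[tool.setuptools]`` package-data options validated above.
ok_config = {
    "package-data": {"mypkg": ["*.txt", "data/*.json"], "*": ["py.typed"]},
    "include-package-data": True,
    "exclude-package-data": {"mypkg.subpkg": ["*.tmp"]},
}
bad_values = [
    {"package-data": {"mypkg": "*.txt"}},       # value must be an array of strings, not a bare string
    {"package-data": {"not a module": ["*"]}},  # key must be a dotted module name or the literal "*"
    {"include-package-data": "yes"},            # must be a boolean
]

def is_mapping_of_glob_lists(value):
    # Rough stand-in for the generated type checks: an object whose values are
    # arrays of strings (the propertyNames/oneOf key check is omitted here).
    return isinstance(value, dict) and all(
        isinstance(patterns, (list, tuple)) and all(isinstance(p, str) for p in patterns)
        for patterns in value.values()
    )

assert is_mapping_of_glob_lists(ok_config["package-data"])
assert not is_mapping_of_glob_lists({"mypkg": "*.txt"})  # would trigger the "must be array" branch

In TOML terms these dicts correspond to tables such as ``[tool.setuptools.package-data]`` with entries like ``mypkg = ["*.txt"]``.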
+ data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple)) + if data__namespacepackages_is_list: + data__namespacepackages_len = len(data__namespacepackages) + for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages): + if not isinstance(data__namespacepackages_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__namespacepackages_item, str): + if not custom_formats["python-module-name"](data__namespacepackages_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "py-modules" in data_keys: + data_keys.remove("py-modules") + data__pymodules = data["py-modules"] + if not isinstance(data__pymodules, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type') + data__pymodules_is_list = isinstance(data__pymodules, (list, tuple)) + if data__pymodules_is_list: + data__pymodules_len = len(data__pymodules) + for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules): + if not isinstance(data__pymodules_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__pymodules_item, str): + if not custom_formats["python-module-name"](data__pymodules_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "data-files" in data_keys: + data_keys.remove("data-files") + data__datafiles = data["data-files"] + if not isinstance(data__datafiles, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__datafiles_is_dict = isinstance(data__datafiles, dict) + if data__datafiles_is_dict: + data__datafiles_keys = set(data__datafiles.keys()) + for data__datafiles_key, data__datafiles_val in data__datafiles.items(): + if REGEX_PATTERNS['^.*$'].search(data__datafiles_key): + if data__datafiles_key in data__datafiles_keys: + data__datafiles_keys.remove(data__datafiles_key) + if not isinstance(data__datafiles_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple)) + if data__datafiles_val_is_list: + data__datafiles_val_len = len(data__datafiles_val) + for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val): + if not isinstance(data__datafiles_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "cmdclass" in data_keys: + data_keys.remove("cmdclass") + data__cmdclass = data["cmdclass"] + if not isinstance(data__cmdclass, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type') + data__cmdclass_is_dict = isinstance(data__cmdclass, dict) + if data__cmdclass_is_dict: + data__cmdclass_keys = set(data__cmdclass.keys()) + for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items(): + if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key): + if data__cmdclass_key in data__cmdclass_keys: + data__cmdclass_keys.remove(data__cmdclass_key) + if not isinstance(data__cmdclass_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type') + if isinstance(data__cmdclass_val, str): + if not custom_formats["python-qualified-identifier"](data__cmdclass_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, 
name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format') + if "license-files" in data_keys: + data_keys.remove("license-files") + data__licensefiles = data["license-files"] + if not isinstance(data__licensefiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type') + data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple)) + if data__licensefiles_is_list: + data__licensefiles_len = len(data__licensefiles) + for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles): + if not isinstance(data__licensefiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + else: data["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'] + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type') + data__dynamic_is_dict = isinstance(data__dynamic, dict) + if data__dynamic_is_dict: + data__dynamic_keys = set(data__dynamic.keys()) + if "version" in data__dynamic_keys: + data__dynamic_keys.remove("version") + data__dynamic__version = data__dynamic["version"] + data__dynamic__version_one_of_count5 = 0 + if data__dynamic__version_one_of_count5 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf') + if "classifiers" in data__dynamic_keys: + data__dynamic_keys.remove("classifiers") + data__dynamic__classifiers = data__dynamic["classifiers"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers") + if "description" in data__dynamic_keys: + data__dynamic_keys.remove("description") + data__dynamic__description = data__dynamic["description"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description") + if "entry-points" in data__dynamic_keys: + data__dynamic_keys.remove("entry-points") + data__dynamic__entrypoints = data__dynamic["entry-points"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points") + if "readme" in data__dynamic_keys: + data__dynamic_keys.remove("readme") + data__dynamic__readme = data__dynamic["readme"] + data__dynamic__readme_any_of_count6 = 0 + if not data__dynamic__readme_any_of_count6: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme") + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + try: + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_keys = set(data__dynamic__readme.keys()) + if "content-type" in data__dynamic__readme_keys: + data__dynamic__readme_keys.remove("content-type") + data__dynamic__readme__contenttype = data__dynamic__readme["content-type"] + if not isinstance(data__dynamic__readme__contenttype, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type') + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': 
False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf') + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_len = len(data__dynamic__readme) + if not all(prop in data__dynamic__readme for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required') + if data__dynamic_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': 
{'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': 
['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required') + data_keys = set(data.keys()) + if "file" in data_keys: + data_keys.remove("file") + data__file = data["file"] + data__file_one_of_count7 = 0 + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", 
definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__file_is_list = isinstance(data__file, (list, tuple)) + if data__file_is_list: + data__file_len = len(data__file) + for data__file_x, data__file_item in enumerate(data__file): + if not isinstance(data__file_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['attr']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required') + data_keys = set(data.keys()) + if "attr" in data_keys: + data_keys.remove("attr") + data__attr = data["attr"] + if not isinstance(data__attr, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "find" in data_keys: + data_keys.remove("find") + data__find = data["find"] + if not isinstance(data__find, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type') + data__find_is_dict = isinstance(data__find, dict) + if data__find_is_dict: + data__find_keys = set(data__find.keys()) + if "where" in data__find_keys: + data__find_keys.remove("where") + data__find__where = data__find["where"] + if not isinstance(data__find__where, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__find__where_is_list = isinstance(data__find__where, (list, tuple)) + if data__find__where_is_list: + data__find__where_len = len(data__find__where) + for data__find__where_x, data__find__where_item in enumerate(data__find__where): + if not isinstance(data__find__where_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "exclude" in data__find_keys: + data__find_keys.remove("exclude") + data__find__exclude = data__find["exclude"] + if not isinstance(data__find__exclude, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple)) + if data__find__exclude_is_list: + data__find__exclude_len = len(data__find__exclude) + for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude): + if not isinstance(data__find__exclude_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "include" in data__find_keys: + data__find_keys.remove("include") + data__find__include = data__find["include"] + if not isinstance(data__find__include, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__include_is_list = isinstance(data__find__include, (list, tuple)) + if data__find__include_is_list: + data__find__include_len = len(data__find__include) + for data__find__include_x, data__find__include_item in enumerate(data__find__include): + if not isinstance(data__find__include_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "namespaces" in data__find_keys: + data__find_keys.remove("namespaces") + data__find__namespaces = data__find["namespaces"] + if not isinstance(data__find__namespaces, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type') + if data__find_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties') + return data + +def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "global" in data_keys: + data_keys.remove("global") + data__global = data["global"] + if not isinstance(data__global, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type') + for data_key, data_val in data.items(): + if REGEX_PATTERNS['.+'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. 
MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__name, str): + if not custom_formats["pep508-identifier"](data__name): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. 
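# --- Illustrative sketch (editorial, not part of the generated module) ---
# A minimal ``[project]`` table satisfying the checks above: ``name`` is always
# required, and ``version`` must be given statically whenever it is not listed in
# ``dynamic``. The concrete values are hypothetical.
_example_project_table = {
    "name": "my-package",     # must be a valid PEP 508 identifier
    "version": "0.1.0",       # must be a valid PEP 440 version
}
# Validating this requires the ``custom_formats`` callables (pep508-identifier,
# pep440, ...) that the rest of this generated module wires in; with them supplied:
# validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(
#     _example_project_table, custom_formats)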
MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type') + if isinstance(data__version, str): + if not custom_formats["pep440"](data__version): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format') + if "description" in data_keys: + data_keys.remove("description") + data__description = data["description"] + if not isinstance(data__description, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, rule='type') + if "readme" in data_keys: + data_keys.remove("readme") + data__readme = data["readme"] + data__readme_one_of_count8 = 0 + if data__readme_one_of_count8 < 2: + try: + if not isinstance(data__readme, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type') + data__readme_one_of_count8 += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count8 < 2: + try: + if not isinstance(data__readme, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type') + data__readme_any_of_count9 = 0 + if not data__readme_any_of_count9: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "file" in data__readme_keys: + data__readme_keys.remove("file") + data__readme__file = data__readme["file"] + if not isinstance(data__readme__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type') + data__readme_any_of_count9 += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count9: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['text']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "text" in data__readme_keys: + data__readme_keys.remove("text") + data__readme__text = data__readme["text"] + if not isinstance(data__readme__text, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type') + data__readme_any_of_count9 += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count9: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf') + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['content-type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. 
``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "content-type" in data__readme_keys: + data__readme_keys.remove("content-type") + data__readme__contenttype = data__readme["content-type"] + if not isinstance(data__readme__contenttype, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type') + data__readme_one_of_count8 += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count8 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count8) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf') + if "requires-python" in data_keys: + data_keys.remove("requires-python") + data__requirespython = data["requires-python"] + if not isinstance(data__requirespython, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='type') + if isinstance(data__requirespython, str): + if not custom_formats["pep508-versionspec"](data__requirespython): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='format') + if "license" in data_keys: + data_keys.remove("license") + data__license = data["license"] + data__license_one_of_count10 = 0 + if data__license_one_of_count10 < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required') + data__license_keys = set(data__license.keys()) + if "file" in data__license_keys: + data__license_keys.remove("file") + data__license__file = data__license["file"] + if not isinstance(data__license__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type') + data__license_one_of_count10 += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count10 < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['text']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}, rule='required') + data__license_keys = set(data__license.keys()) + if "text" in data__license_keys: + data__license_keys.remove("text") + data__license__text = data__license["text"] + if not isinstance(data__license__text, 
(str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}, rule='type') + data__license_one_of_count10 += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count10 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count10) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, rule='oneOf') + if "authors" in data_keys: + data_keys.remove("authors") + data__authors = data["authors"] + if not isinstance(data__authors, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type') + data__authors_is_list = isinstance(data__authors, (list, tuple)) + if data__authors_is_list: + data__authors_len = len(data__authors) + for data__authors_x, data__authors_item in enumerate(data__authors): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]") + if "maintainers" in data_keys: + data_keys.remove("maintainers") + data__maintainers = data["maintainers"] + if not isinstance(data__maintainers, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
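# --- Illustrative sketch (editorial, not part of the generated module) ---
# The shapes of ``readme`` and ``license`` accepted by the ``oneOf`` checks above;
# all file names below are hypothetical.
_example_readme_as_path = "README.rst"          # string form: relative path to the file
_example_readme_as_table = {                    # table form: ``file`` or ``text`` ...
    "file": "README.md",
    "content-type": "text/markdown",            # ... plus a mandatory ``content-type``
}
_example_license_file = {"file": "LICENSE"}     # exactly one of ``file`` ...
_example_license_text = {"text": "MIT"}         # ... or ``text`` must be present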
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type') + data__maintainers_is_list = isinstance(data__maintainers, (list, tuple)) + if data__maintainers_is_list: + data__maintainers_len = len(data__maintainers) + for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]") + if "keywords" in data_keys: + data_keys.remove("keywords") + data__keywords = data["keywords"] + if not isinstance(data__keywords, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type') + data__keywords_is_list = isinstance(data__keywords, (list, tuple)) + if data__keywords_is_list: + data__keywords_len = len(data__keywords) + for data__keywords_x, data__keywords_item in enumerate(data__keywords): + if not isinstance(data__keywords_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "classifiers" in data_keys: + data_keys.remove("classifiers") + data__classifiers = data["classifiers"] + if not isinstance(data__classifiers, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, rule='type') + data__classifiers_is_list = isinstance(data__classifiers, (list, tuple)) + if data__classifiers_is_list: + data__classifiers_len = len(data__classifiers) + for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers): + if not isinstance(data__classifiers_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='type') + if isinstance(data__classifiers_item, str): + if not custom_formats["trove-classifier"](data__classifiers_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + 
".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='format') + if "urls" in data_keys: + data_keys.remove("urls") + data__urls = data["urls"] + if not isinstance(data__urls, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type') + data__urls_is_dict = isinstance(data__urls, dict) + if data__urls_is_dict: + data__urls_keys = set(data__urls.keys()) + for data__urls_key, data__urls_val in data__urls.items(): + if REGEX_PATTERNS['^.+$'].search(data__urls_key): + if data__urls_key in data__urls_keys: + data__urls_keys.remove(data__urls_key) + if not isinstance(data__urls_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type') + if isinstance(data__urls_val, str): + if not custom_formats["url"](data__urls_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format') + if data__urls_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties') + if "scripts" in data_keys: + data_keys.remove("scripts") + data__scripts = data["scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts") + if "gui-scripts" in data_keys: + data_keys.remove("gui-scripts") + data__guiscripts = data["gui-scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts") + if "entry-points" in data_keys: + data_keys.remove("entry-points") + data__entrypoints = data["entry-points"] + data__entrypoints_is_dict = isinstance(data__entrypoints, dict) + if data__entrypoints_is_dict: + data__entrypoints_keys = set(data__entrypoints.keys()) + for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items(): + if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key): + if data__entrypoints_key in data__entrypoints_keys: + data__entrypoints_keys.remove(data__entrypoints_key) + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}") + if 
data__entrypoints_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties') + data__entrypoints_len = len(data__entrypoints) + if data__entrypoints_len != 0: + data__entrypoints_property_names = True + for data__entrypoints_key in data__entrypoints: + try: + if isinstance(data__entrypoints_key, str): + if not custom_formats["python-entrypoint-group"](data__entrypoints_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format') + except JsonSchemaValueException: + data__entrypoints_property_names = False + if not data__entrypoints_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
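# --- Illustrative sketch (editorial, not part of the generated module) ---
# ``scripts``, ``gui-scripts`` and ``entry-points`` values as accepted by the
# entry-point-group checks above: group and entry names must satisfy the
# python-entrypoint-* formats, and each value references a Python object.
# The module and object names are hypothetical.
_example_entry_points = {
    "scripts": {"my-cli": "my_package.cli:main"},
    "gui-scripts": {"my-gui": "my_package.gui:run"},
    "entry-points": {
        "my_package.plugins": {"builtin": "my_package.plugins:BuiltinPlugin"},
    },
}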
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames') + if "dependencies" in data_keys: + data_keys.remove("dependencies") + data__dependencies = data["dependencies"] + if not isinstance(data__dependencies, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__dependencies_is_list = isinstance(data__dependencies, (list, tuple)) + if data__dependencies_is_list: + data__dependencies_len = len(data__dependencies) + for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]") + if "optional-dependencies" in data_keys: + data_keys.remove("optional-dependencies") + data__optionaldependencies = data["optional-dependencies"] + if not isinstance(data__optionaldependencies, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict) + if data__optionaldependencies_is_dict: + data__optionaldependencies_keys = set(data__optionaldependencies.keys()) + for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items(): + if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key): + if data__optionaldependencies_key in data__optionaldependencies_keys: + data__optionaldependencies_keys.remove(data__optionaldependencies_key) + if not isinstance(data__optionaldependencies_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple)) + if data__optionaldependencies_val_is_list: + data__optionaldependencies_val_len = len(data__optionaldependencies_val) + for data__optionaldependencies_val_x, 
data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]") + if data__optionaldependencies_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + data__optionaldependencies_len = len(data__optionaldependencies) + if data__optionaldependencies_len != 0: + data__optionaldependencies_property_names = True + for data__optionaldependencies_key in data__optionaldependencies: + try: + if isinstance(data__optionaldependencies_key, str): + if not custom_formats["pep508-identifier"](data__optionaldependencies_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format') + except JsonSchemaValueException: + data__optionaldependencies_property_names = False + if not data__optionaldependencies_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type') + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_len = len(data__dynamic) + for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic): + if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 
'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
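# --- Illustrative sketch (editorial, not part of the generated module) ---
# ``dependencies`` and ``optional-dependencies`` entries are PEP 508 requirement
# strings, and ``dynamic`` may only contain field names from the enum checked above.
# The requirement strings and extra name are hypothetical.
_example_dependency_fields = {
    "dependencies": ["requests>=2.0", "tomli; python_version < '3.11'"],
    "optional-dependencies": {"test": ["pytest>=6"]},   # extra names: pep508-identifier
    "dynamic": ["description", "readme"],               # values restricted to the enum
}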
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties') + try: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['dynamic']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required') + data_keys = set(data.keys()) + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_contains = False + for data__dynamic_key in data__dynamic: + try: + if data__dynamic_key != "version": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const') + data__dynamic_contains = True + break + except JsonSchemaValueException: pass + if not data__dynamic_contains: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains') + except JsonSchemaValueException: pass + else: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not') + except JsonSchemaValueException: + pass + else: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['version']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type') + if isinstance(data, str): + if not custom_formats["pep508"](data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key, data_val in data.items(): + if REGEX_PATTERNS['^.+$'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type') + if isinstance(data_val, str): + if not custom_formats["python-entrypoint-reference"](data_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties') + data_len = len(data) + if data_len != 0: + data_property_names = True + for data_key in data: + try: + if isinstance(data_key, str): + if not custom_formats["python-entrypoint-name"](data_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format') + except JsonSchemaValueException: + data_property_names = False + if not data_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type') + if "email" in data_keys: + data_keys.remove("email") + data__email = data["email"] + if not isinstance(data__email, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type') + if isinstance(data__email, str): + if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format') + return data
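The generated functions above are not meant to be called one by one; they are consumed through the ``_validate_pyproject`` package, which ``pyprojecttoml.py`` further down in this patch imports as ``validator`` and uses via ``validator.validate()`` together with the ``summary``/``details`` attributes of ``validator.ValidationError``. A rough usage sketch with a made-up ``[project]`` table that omits ``version`` (and does not list it in ``dynamic``), tripping the ``if``/``then`` rule encoded above:

    from setuptools.config import _validate_pyproject as validator

    # ``version`` is neither given statically nor declared in ``dynamic`` -> invalid
    bad = {"project": {"name": "demo", "dependencies": ["requests>=2.28"]}}
    try:
        validator.validate(bad)
    except validator.ValidationError as ex:  # same attributes that pyprojecttoml.py logs
        print(ex.summary)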
\ No newline at end of file diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py new file mode 100644 index 00000000..f41fce38 --- /dev/null +++ b/setuptools/config/_validate_pyproject/formats.py @@ -0,0 +1,257 @@ +import logging +import os +import re +import string +import typing +from itertools import chain as _chain + +_logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------------- +# PEP 440 + +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version +""" + +VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I) + + +def pep440(version: str) -> bool: + return VERSION_REGEX.match(version) is not None + + +# ------------------------------------------------------------------------------------- +# PEP 508 + +PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])" +PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I) + + +def pep508_identifier(name: str) -> bool: + return PEP508_IDENTIFIER_REGEX.match(name) is not None + + +try: + try: + from packaging import requirements as _req + except ImportError: # pragma: no cover + # let's try setuptools vendored version + from setuptools._vendor.packaging import requirements as _req # type: ignore + + def pep508(value: str) -> bool: + try: + _req.Requirement(value) + return True + except _req.InvalidRequirement: + return False + +except ImportError: # pragma: no cover + _logger.warning( + "Could not find an installation of `packaging`. Requirements, dependencies and " + "versions might not be validated. " + "To enforce validation, please install `packaging`." 
+ ) + + def pep508(value: str) -> bool: + return True + + +def pep508_versionspec(value: str) -> bool: + """Expression that can be used to specify/lock versions (including ranges)""" + if any(c in value for c in (";", "]", "@")): + # In PEP 508: + # conditional markers, extras and URL specs are not included in the + # versionspec + return False + # Let's pretend we have a dependency called `requirement` with the given + # version spec, then we can re-use the pep508 function for validation: + return pep508(f"requirement{value}") + + +# ------------------------------------------------------------------------------------- +# PEP 517 + + +def pep517_backend_reference(value: str) -> bool: + module, _, obj = value.partition(":") + identifiers = (i.strip() for i in _chain(module.split("."), obj.split("."))) + return all(python_identifier(i) for i in identifiers if i) + + +# ------------------------------------------------------------------------------------- +# Classifiers - PEP 301 + + +def _download_classifiers() -> str: + import cgi + from urllib.request import urlopen + + url = "https://pypi.org/pypi?:action=list_classifiers" + with urlopen(url) as response: + content_type = response.getheader("content-type", "text/plain") + encoding = cgi.parse_header(content_type)[1].get("charset", "utf-8") + return response.read().decode(encoding) + + +class _TroveClassifier: + """The ``trove_classifiers`` package is the official way of validating classifiers, + however this package might not be always available. + As a workaround we can still download a list from PyPI. + We also don't want to be over strict about it, so simply skipping silently is an + option (classifiers will be validated anyway during the upload to PyPI). + """ + + def __init__(self): + self.downloaded: typing.Union[None, False, typing.Set[str]] = None + self._skip_download = False + # None => not cached yet + # False => cache not available + self.__name__ = "trove_classifier" # Emulate a public function + + def _disable_download(self): + # This is a private API. Only setuptools has the consent of using it. + self._skip_download = True + + def __call__(self, value: str) -> bool: + if self.downloaded is False or self._skip_download is True: + return True + + if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"): + self.downloaded = False + msg = ( + "Install ``trove-classifiers`` to ensure proper validation. " + "Skipping download of classifiers list from PyPI (NO_NETWORK)." + ) + _logger.debug(msg) + return True + + if self.downloaded is None: + msg = ( + "Install ``trove-classifiers`` to ensure proper validation. " + "Meanwhile a list of classifiers will be downloaded from PyPI." 
+ ) + _logger.debug(msg) + try: + self.downloaded = set(_download_classifiers().splitlines()) + except Exception: + self.downloaded = False + _logger.debug("Problem with download, skipping validation") + return True + + return value in self.downloaded or value.lower().startswith("private ::") + + +try: + from trove_classifiers import classifiers as _trove_classifiers + + def trove_classifier(value: str) -> bool: + return value in _trove_classifiers or value.lower().startswith("private ::") + +except ImportError: # pragma: no cover + trove_classifier = _TroveClassifier() + + +# ------------------------------------------------------------------------------------- +# Non-PEP related + + +def url(value: str) -> bool: + from urllib.parse import urlparse + + try: + parts = urlparse(value) + if not parts.scheme: + _logger.warning( + "For maximum compatibility please make sure to include a " + "`scheme` prefix in your URL (e.g. 'http://'). " + f"Given value: {value}" + ) + if not (value.startswith("/") or value.startswith("\\") or "@" in value): + parts = urlparse(f"http://{value}") + + return bool(parts.scheme and parts.netloc) + except Exception: + return False + + +# https://packaging.python.org/specifications/entry-points/ +ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?" +ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I) +RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+" +RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I) +ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*" +ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I) + + +def python_identifier(value: str) -> bool: + return value.isidentifier() + + +def python_qualified_identifier(value: str) -> bool: + if value.startswith(".") or value.endswith("."): + return False + return all(python_identifier(m) for m in value.split(".")) + + +def python_module_name(value: str) -> bool: + return python_qualified_identifier(value) + + +def python_entrypoint_group(value: str) -> bool: + return ENTRYPOINT_GROUP_REGEX.match(value) is not None + + +def python_entrypoint_name(value: str) -> bool: + if not ENTRYPOINT_REGEX.match(value): + return False + if not RECOMMEDED_ENTRYPOINT_REGEX.match(value): + msg = f"Entry point `{value}` does not follow recommended pattern: " + msg += RECOMMEDED_ENTRYPOINT_PATTERN + _logger.warning(msg) + return True + + +def python_entrypoint_reference(value: str) -> bool: + module, _, rest = value.partition(":") + if "[" in rest: + obj, _, extras_ = rest.partition("[") + if extras_.strip()[-1] != "]": + return False + extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(",")) + if not all(pep508_identifier(e) for e in extras): + return False + _logger.warning(f"`{value}` - using extras for entry points is not recommended") + else: + obj = rest + + module_parts = module.split(".") + identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts + return all(python_identifier(i.strip()) for i in identifiers) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py new file mode 100644 index 00000000..da55d4ee --- /dev/null +++ b/setuptools/config/expand.py @@ -0,0 +1,481 @@ +"""Utility functions to expand configuration directives or special values +(such glob patterns). + +We can split the process of interpreting configuration files into 2 steps: + +1. 
The parsing the file contents from strings to value objects + that can be understand by Python (for example a string with a comma + separated list of keywords into an actual Python list of strings). + +2. The expansion (or post-processing) of these values according to the + semantics ``setuptools`` assign to them (for example a configuration field + with the ``file:`` directive should be expanded from a list of file paths to + a single string with the contents of those files concatenated) + +This module focus on the second step, and therefore allow sharing the expansion +functions among several configuration file formats. +""" +import ast +import importlib +import io +import os +import sys +import warnings +from glob import iglob +from configparser import ConfigParser +from importlib.machinery import ModuleSpec +from itertools import chain +from typing import ( + TYPE_CHECKING, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Tuple, + TypeVar, + Union, + cast +) +from types import ModuleType + +from distutils.errors import DistutilsOptionError + +if TYPE_CHECKING: + from setuptools.dist import Distribution # noqa + from setuptools.discovery import ConfigDiscovery # noqa + from distutils.dist import DistributionMetadata # noqa + +chain_iter = chain.from_iterable +_Path = Union[str, os.PathLike] +_K = TypeVar("_K") +_V = TypeVar("_V", covariant=True) + + +class StaticModule: + """Proxy to a module object that avoids executing arbitrary code.""" + + def __init__(self, name: str, spec: ModuleSpec): + with open(spec.origin) as strm: # type: ignore + src = strm.read() + module = ast.parse(src) + vars(self).update(locals()) + del self.self + + def __getattr__(self, attr): + """Attempt to load an attribute "statically", via :func:`ast.literal_eval`.""" + try: + assignment_expressions = ( + statement + for statement in self.module.body + if isinstance(statement, ast.Assign) + ) + expressions_with_target = ( + (statement, target) + for statement in assignment_expressions + for target in statement.targets + ) + matching_values = ( + statement.value + for statement, target in expressions_with_target + if isinstance(target, ast.Name) and target.id == attr + ) + return next(ast.literal_eval(value) for value in matching_values) + except Exception as e: + raise AttributeError(f"{self.name} has no attribute {attr}") from e + + +def glob_relative( + patterns: Iterable[str], root_dir: Optional[_Path] = None +) -> List[str]: + """Expand the list of glob patterns, but preserving relative paths. + + :param list[str] patterns: List of glob patterns + :param str root_dir: Path to which globs should be relative + (current directory by default) + :rtype: list + """ + glob_characters = {'*', '?', '[', ']', '{', '}'} + expanded_values = [] + root_dir = root_dir or os.getcwd() + for value in patterns: + + # Has globby characters? 
+ if any(char in value for char in glob_characters): + # then expand the glob pattern while keeping paths *relative*: + glob_path = os.path.abspath(os.path.join(root_dir, value)) + expanded_values.extend(sorted( + os.path.relpath(path, root_dir).replace(os.sep, "/") + for path in iglob(glob_path, recursive=True))) + + else: + # take the value as-is + path = os.path.relpath(value, root_dir).replace(os.sep, "/") + expanded_values.append(path) + + return expanded_values + + +def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str: + """Return the content of the files concatenated using ``\n`` as str + + This function is sandboxed and won't reach anything outside ``root_dir`` + + (By default ``root_dir`` is the current directory). + """ + from setuptools.extern.more_itertools import always_iterable + + root_dir = os.path.abspath(root_dir or os.getcwd()) + _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths)) + return '\n'.join( + _read_file(path) + for path in _filter_existing_files(_filepaths) + if _assert_local(path, root_dir) + ) + + +def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]: + for path in filepaths: + if os.path.isfile(path): + yield path + else: + warnings.warn(f"File {path!r} cannot be found") + + +def _read_file(filepath: Union[bytes, _Path]) -> str: + with io.open(filepath, encoding='utf-8') as f: + return f.read() + + +def _assert_local(filepath: _Path, root_dir: str): + if not os.path.abspath(filepath).startswith(root_dir): + msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})" + raise DistutilsOptionError(msg) + + return True + + +def read_attr( + attr_desc: str, + package_dir: Optional[Mapping[str, str]] = None, + root_dir: Optional[_Path] = None +): + """Reads the value of an attribute from a module. + + This function will try to read the attributed statically first + (via :func:`ast.literal_eval`), and only evaluate the module if it fails. + + Examples: + read_attr("package.attr") + read_attr("package.module.attr") + + :param str attr_desc: Dot-separated string describing how to reach the + attribute (see examples above) + :param dict[str, str] package_dir: Mapping of package names to their + location in disk (represented by paths relative to ``root_dir``). + :param str root_dir: Path to directory containing all the packages in + ``package_dir`` (current directory by default). 
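Both helpers above resolve everything relative to ``root_dir``: ``read_files`` concatenates file contents but refuses to reach outside the project root, and ``read_attr`` first attempts a static :func:`ast.literal_eval` lookup before falling back to importing the module. A small sketch under an assumed layout (``README.rst`` and a ``src/mypkg/__init__.py`` containing ``__version__ = "1.2.3"`` are hypothetical):

    from setuptools.config import expand

    description = expand.read_files(["README.rst"])                # file contents as one string
    version = expand.read_attr("mypkg.__version__", {"": "src"})   # -> "1.2.3", read without importing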
+ :rtype: str + """ + root_dir = root_dir or os.getcwd() + attrs_path = attr_desc.strip().split('.') + attr_name = attrs_path.pop() + module_name = '.'.join(attrs_path) + module_name = module_name or '__init__' + _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir) + spec = _find_spec(module_name, path) + + try: + return getattr(StaticModule(module_name, spec), attr_name) + except Exception: + # fallback to evaluate module + module = _load_spec(spec, module_name) + return getattr(module, attr_name) + + +def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec: + spec = importlib.util.spec_from_file_location(module_name, module_path) + spec = spec or importlib.util.find_spec(module_name) + + if spec is None: + raise ModuleNotFoundError(module_name) + + return spec + + +def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType: + name = getattr(spec, "__name__", module_name) + if name in sys.modules: + return sys.modules[name] + module = importlib.util.module_from_spec(spec) + sys.modules[name] = module # cache (it also ensures `==` works on loaded items) + spec.loader.exec_module(module) # type: ignore + return module + + +def _find_module( + module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path +) -> Tuple[_Path, Optional[str], str]: + """Given a module (that could normally be imported by ``module_name`` + after the build is complete), find the path to the parent directory where + it is contained and the canonical name that could be used to import it + considering the ``package_dir`` in the build configuration and ``root_dir`` + """ + parent_path = root_dir + module_parts = module_name.split('.') + if package_dir: + if module_parts[0] in package_dir: + # A custom path was specified for the module we want to import + custom_path = package_dir[module_parts[0]] + parts = custom_path.rsplit('/', 1) + if len(parts) > 1: + parent_path = os.path.join(root_dir, parts[0]) + parent_module = parts[1] + else: + parent_module = custom_path + module_name = ".".join([parent_module, *module_parts[1:]]) + elif '' in package_dir: + # A custom parent directory was specified for all root modules + parent_path = os.path.join(root_dir, package_dir['']) + + path_start = os.path.join(parent_path, *module_name.split(".")) + candidates = chain( + (f"{path_start}.py", os.path.join(path_start, "__init__.py")), + iglob(f"{path_start}.*") + ) + module_path = next((x for x in candidates if os.path.isfile(x)), None) + return parent_path, module_path, module_name + + +def resolve_class( + qualified_class_name: str, + package_dir: Optional[Mapping[str, str]] = None, + root_dir: Optional[_Path] = None +) -> Callable: + """Given a qualified class name, return the associated class object""" + root_dir = root_dir or os.getcwd() + idx = qualified_class_name.rfind('.') + class_name = qualified_class_name[idx + 1 :] + pkg_name = qualified_class_name[:idx] + + _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir) + module = _load_spec(_find_spec(module_name, path), module_name) + return getattr(module, class_name) + + +def cmdclass( + values: Dict[str, str], + package_dir: Optional[Mapping[str, str]] = None, + root_dir: Optional[_Path] = None +) -> Dict[str, Callable]: + """Given a dictionary mapping command names to strings for qualified class + names, apply :func:`resolve_class` to the dict values. 
+ """ + return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()} + + +def find_packages( + *, + namespaces=True, + fill_package_dir: Optional[Dict[str, str]] = None, + root_dir: Optional[_Path] = None, + **kwargs +) -> List[str]: + """Works similarly to :func:`setuptools.find_packages`, but with all + arguments given as keyword arguments. Moreover, ``where`` can be given + as a list (the results will be simply concatenated). + + When the additional keyword argument ``namespaces`` is ``True``, it will + behave like :func:`setuptools.find_namespace_packages`` (i.e. include + implicit namespaces as per :pep:`420`). + + The ``where`` argument will be considered relative to ``root_dir`` (or the current + working directory when ``root_dir`` is not given). + + If the ``fill_package_dir`` argument is passed, this function will consider it as a + similar data structure to the ``package_dir`` configuration parameter add fill-in + any missing package location. + + :rtype: list + """ + from setuptools.discovery import construct_package_dir + from setuptools.extern.more_itertools import unique_everseen, always_iterable + + if namespaces: + from setuptools.discovery import PEP420PackageFinder as PackageFinder + else: + from setuptools.discovery import PackageFinder # type: ignore + + root_dir = root_dir or os.curdir + where = kwargs.pop('where', ['.']) + packages: List[str] = [] + fill_package_dir = {} if fill_package_dir is None else fill_package_dir + search = list(unique_everseen(always_iterable(where))) + + if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)): + fill_package_dir.setdefault("", search[0]) + + for path in search: + package_path = _nest_path(root_dir, path) + pkgs = PackageFinder.find(package_path, **kwargs) + packages.extend(pkgs) + if pkgs and not ( + fill_package_dir.get("") == path + or os.path.samefile(package_path, root_dir) + ): + fill_package_dir.update(construct_package_dir(pkgs, path)) + + return packages + + +def _same_path(p1: _Path, p2: _Path) -> bool: + """Differs from os.path.samefile because it does not require paths to exist. + Purely string based (no comparison between i-nodes). + >>> _same_path("a/b", "./a/b") + True + >>> _same_path("a/b", "a/./b") + True + >>> _same_path("a/b", "././a/b") + True + >>> _same_path("a/b", "./a/b/c/..") + True + >>> _same_path("a/b", "../a/b/c") + False + >>> _same_path("a", "a/b") + False + """ + return os.path.normpath(p1) == os.path.normpath(p2) + + +def _nest_path(parent: _Path, path: _Path) -> str: + path = parent if path in {".", ""} else os.path.join(parent, path) + return os.path.normpath(path) + + +def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str: + """When getting the version directly from an attribute, + it should be normalised to string. + """ + if callable(value): + value = value() + + value = cast(Iterable[Union[str, int]], value) + + if not isinstance(value, str): + if hasattr(value, '__iter__'): + value = '.'.join(map(str, value)) + else: + value = '%s' % value + + return value + + +def canonic_package_data(package_data: dict) -> dict: + if "*" in package_data: + package_data[""] = package_data.pop("*") + return package_data + + +def canonic_data_files( + data_files: Union[list, dict], root_dir: Optional[_Path] = None +) -> List[Tuple[str, List[str]]]: + """For compatibility with ``setup.py``, ``data_files`` should be a list + of pairs instead of a dict. + + This function also expands glob patterns. 
+ """ + if isinstance(data_files, list): + return data_files + + return [ + (dest, glob_relative(patterns, root_dir)) + for dest, patterns in data_files.items() + ] + + +def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]: + """Given the contents of entry-points file, + process it into a 2-level dictionary (``dict[str, dict[str, str]]``). + The first level keys are entry-point groups, the second level keys are + entry-point names, and the second level values are references to objects + (that correspond to the entry-point value). + """ + parser = ConfigParser(default_section=None, delimiters=("=",)) # type: ignore + parser.optionxform = str # case sensitive + parser.read_string(text, text_source) + groups = {k: dict(v.items()) for k, v in parser.items()} + groups.pop(parser.default_section, None) + return groups + + +class EnsurePackagesDiscovered: + """Some expand functions require all the packages to already be discovered before + they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`. + + Therefore in some cases we will need to run autodiscovery during the evaluation of + the configuration. However, it is better to postpone calling package discovery as + much as possible, because some parameters can influence it (e.g. ``package_dir``), + and those might not have been processed yet. + """ + + def __init__(self, distribution: "Distribution"): + self._dist = distribution + self._called = False + + def __call__(self): + """Trigger the automatic package discovery, if it is still necessary.""" + if not self._called: + self._called = True + self._dist.set_defaults(name=False) # Skip name, we can still be parsing + + def __enter__(self): + return self + + def __exit__(self, _exc_type, _exc_value, _traceback): + if self._called: + self._dist.set_defaults.analyse_name() # Now we can set a default name + + def _get_package_dir(self) -> Mapping[str, str]: + self() + pkg_dir = self._dist.package_dir + return {} if pkg_dir is None else pkg_dir + + @property + def package_dir(self) -> Mapping[str, str]: + """Proxy to ``package_dir`` that may trigger auto-discovery when used.""" + return LazyMappingProxy(self._get_package_dir) + + +class LazyMappingProxy(Mapping[_K, _V]): + """Mapping proxy that delays resolving the target object, until really needed. + + >>> def obtain_mapping(): + ... print("Running expensive function!") + ... return {"key": "value", "other key": "other value"} + >>> mapping = LazyMappingProxy(obtain_mapping) + >>> mapping["key"] + Running expensive function! 
+ 'value' + >>> mapping["other key"] + 'other value' + """ + + def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]): + self._obtain = obtain_mapping_value + self._value: Optional[Mapping[_K, _V]] = None + + def _target(self) -> Mapping[_K, _V]: + if self._value is None: + self._value = self._obtain() + return self._value + + def __getitem__(self, key: _K) -> _V: + return self._target()[key] + + def __len__(self) -> int: + return len(self._target()) + + def __iter__(self) -> Iterator[_K]: + return iter(self._target()) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py new file mode 100644 index 00000000..be812142 --- /dev/null +++ b/setuptools/config/pyprojecttoml.py @@ -0,0 +1,440 @@ +"""Load setuptools configuration from ``pyproject.toml`` files""" +import logging +import os +import warnings +from contextlib import contextmanager +from functools import partial +from typing import TYPE_CHECKING, Callable, Dict, Optional, Mapping, Union + +from setuptools.errors import FileError, OptionError + +from . import expand as _expand +from ._apply_pyprojecttoml import apply as _apply +from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField + +if TYPE_CHECKING: + from setuptools.dist import Distribution # noqa + +_Path = Union[str, os.PathLike] +_logger = logging.getLogger(__name__) + + +def load_file(filepath: _Path) -> dict: + from setuptools.extern import tomli # type: ignore + + with open(filepath, "rb") as file: + return tomli.load(file) + + +def validate(config: dict, filepath: _Path) -> bool: + from . import _validate_pyproject as validator + + trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier") + if hasattr(trove_classifier, "_disable_download"): + # Improve reproducibility by default. See issue 31 for validate-pyproject. + trove_classifier._disable_download() # type: ignore + + try: + return validator.validate(config) + except validator.ValidationError as ex: + _logger.error(f"configuration error: {ex.summary}") # type: ignore + _logger.debug(ex.details) # type: ignore + error = ValueError(f"invalid pyproject.toml config: {ex.name}") # type: ignore + raise error from None + + +def apply_configuration( + dist: "Distribution", + filepath: _Path, + ignore_option_errors=False, +) -> "Distribution": + """Apply the configuration from a ``pyproject.toml`` file into an existing + distribution object. + """ + config = read_configuration(filepath, True, ignore_option_errors, dist) + return _apply(dist, config, filepath) + + +def read_configuration( + filepath: _Path, + expand=True, + ignore_option_errors=False, + dist: Optional["Distribution"] = None, +): + """Read given configuration file and returns options from it as a dict. + + :param str|unicode filepath: Path to configuration file in the ``pyproject.toml`` + format. + + :param bool expand: Whether to expand directives and other computed values + (i.e. post-process the given configuration) + + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + + :param Distribution|None: Distribution object to which the configuration refers. + If not given a dummy object will be created and discarded after the + configuration is read. This is used for auto-discovery of packages in the case + a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded. 
+ When ``expand=False`` this object is simply ignored. + + :rtype: dict + """ + filepath = os.path.abspath(filepath) + + if not os.path.isfile(filepath): + raise FileError(f"Configuration file {filepath!r} does not exist.") + + asdict = load_file(filepath) or {} + project_table = asdict.get("project", {}) + tool_table = asdict.get("tool", {}) + setuptools_table = tool_table.get("setuptools", {}) + if not asdict or not (project_table or setuptools_table): + return {} # User is not using pyproject to configure setuptools + + # TODO: Remove the following once the feature stabilizes: + msg = ( + "Support for project metadata in `pyproject.toml` is still experimental " + "and may be removed (or change) in future releases." + ) + warnings.warn(msg, _ExperimentalProjectMetadata) + + # There is an overall sense in the community that making include_package_data=True + # the default would be an improvement. + # `ini2toml` backfills include_package_data=False when nothing is explicitly given, + # therefore setting a default here is backwards compatible. + orig_setuptools_table = setuptools_table.copy() + if dist and getattr(dist, "include_package_data") is not None: + setuptools_table.setdefault("include-package-data", dist.include_package_data) + else: + setuptools_table.setdefault("include-package-data", True) + # Persist changes: + asdict["tool"] = tool_table + tool_table["setuptools"] = setuptools_table + + try: + # Don't complain about unrelated errors (e.g. tools not using the "tool" table) + subset = {"project": project_table, "tool": {"setuptools": setuptools_table}} + validate(subset, filepath) + except Exception as ex: + # TODO: Remove the following once the feature stabilizes: + if _skip_bad_config(project_table, orig_setuptools_table, dist): + return {} + # TODO: After the previous statement is removed the try/except can be replaced + # by the _ignore_errors context manager. + if ignore_option_errors: + _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}") + else: + raise # re-raise exception + + if expand: + root_dir = os.path.dirname(filepath) + return expand_configuration(asdict, root_dir, ignore_option_errors, dist) + + return asdict + + +def _skip_bad_config( + project_cfg: dict, setuptools_cfg: dict, dist: Optional["Distribution"] +) -> bool: + """Be temporarily forgiving with invalid ``pyproject.toml``""" + # See pypa/setuptools#3199 and pypa/cibuildwheel#1064 + + if dist is None or ( + dist.metadata.name is None + and dist.metadata.version is None + and dist.install_requires is None + ): + # It seems that the build is not getting any configuration from other places + return False + + if setuptools_cfg: + # If `[tool.setuptools]` is set, then `pyproject.toml` config is intentional + return False + + given_config = set(project_cfg.keys()) + popular_subset = {"name", "version", "python_requires", "requires-python"} + if given_config <= popular_subset: + # It seems that the docs in cibuildtool has been inadvertently encouraging users + # to create `pyproject.toml` files that are not compliant with the standards. + # Let's be forgiving for the time being. + warnings.warn(_InvalidFile.message(), _InvalidFile, stacklevel=2) + return True + + return False + + +def expand_configuration( + config: dict, + root_dir: Optional[_Path] = None, + ignore_option_errors: bool = False, + dist: Optional["Distribution"] = None, +) -> dict: + """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...) + find their final values. 
+ + :param dict config: Dict containing the configuration for the distribution + :param str root_dir: Top-level directory for the distribution/project + (the same directory where ``pyproject.toml`` is place) + :param bool ignore_option_errors: see :func:`read_configuration` + :param Distribution|None: Distribution object to which the configuration refers. + If not given a dummy object will be created and discarded after the + configuration is read. Used in the case a dynamic configuration + (e.g. ``attr`` or ``cmdclass``). + + :rtype: dict + """ + return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand() + + +class _ConfigExpander: + def __init__( + self, + config: dict, + root_dir: Optional[_Path] = None, + ignore_option_errors: bool = False, + dist: Optional["Distribution"] = None, + ): + self.config = config + self.root_dir = root_dir or os.getcwd() + self.project_cfg = config.get("project", {}) + self.dynamic = self.project_cfg.get("dynamic", []) + self.setuptools_cfg = config.get("tool", {}).get("setuptools", {}) + self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {}) + self.ignore_option_errors = ignore_option_errors + self._dist = dist + + def _ensure_dist(self) -> "Distribution": + from setuptools.dist import Distribution + + attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)} + return self._dist or Distribution(attrs) + + def _process_field(self, container: dict, field: str, fn: Callable): + if field in container: + with _ignore_errors(self.ignore_option_errors): + container[field] = fn(container[field]) + + def _canonic_package_data(self, field="package-data"): + package_data = self.setuptools_cfg.get(field, {}) + return _expand.canonic_package_data(package_data) + + def expand(self): + self._expand_packages() + self._canonic_package_data() + self._canonic_package_data("exclude-package-data") + + # A distribution object is required for discovering the correct package_dir + dist = self._ensure_dist() + + with _EnsurePackagesDiscovered(dist, self.setuptools_cfg) as ensure_discovered: + package_dir = ensure_discovered.package_dir + self._expand_data_files() + self._expand_cmdclass(package_dir) + self._expand_all_dynamic(dist, package_dir) + + return self.config + + def _expand_packages(self): + packages = self.setuptools_cfg.get("packages") + if packages is None or isinstance(packages, (list, tuple)): + return + + find = packages.get("find") + if isinstance(find, dict): + find["root_dir"] = self.root_dir + find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {}) + with _ignore_errors(self.ignore_option_errors): + self.setuptools_cfg["packages"] = _expand.find_packages(**find) + + def _expand_data_files(self): + data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir) + self._process_field(self.setuptools_cfg, "data-files", data_files) + + def _expand_cmdclass(self, package_dir: Mapping[str, str]): + root_dir = self.root_dir + cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir) + self._process_field(self.setuptools_cfg, "cmdclass", cmdclass) + + def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]): + special = ( # need special handling + "version", + "readme", + "entry-points", + "scripts", + "gui-scripts", + "classifiers", + ) + # `_obtain` functions are assumed to raise appropriate exceptions/warnings. 
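+ # (fields listed in ``special`` need extra normalisation, so they are resolved by the dedicated ``_obtain_*`` helpers below instead)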
+ obtained_dynamic = { + field: self._obtain(dist, field, package_dir) + for field in self.dynamic + if field not in special + } + obtained_dynamic.update( + self._obtain_entry_points(dist, package_dir) or {}, + version=self._obtain_version(dist, package_dir), + readme=self._obtain_readme(dist), + classifiers=self._obtain_classifiers(dist), + ) + # `None` indicates there is nothing in `tool.setuptools.dynamic` but the value + # might have already been set by setup.py/extensions, so avoid overwriting. + updates = {k: v for k, v in obtained_dynamic.items() if v is not None} + self.project_cfg.update(updates) + + def _ensure_previously_set(self, dist: "Distribution", field: str): + previous = _PREVIOUSLY_DEFINED[field](dist) + if previous is None and not self.ignore_option_errors: + msg = ( + f"No configuration found for dynamic {field!r}.\n" + "Some dynamic fields need to be specified via `tool.setuptools.dynamic`" + "\nothers must be specified via the equivalent attribute in `setup.py`." + ) + raise OptionError(msg) + + def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]): + if field in self.dynamic_cfg: + directive = self.dynamic_cfg[field] + with _ignore_errors(self.ignore_option_errors): + root_dir = self.root_dir + if "file" in directive: + return _expand.read_files(directive["file"], root_dir) + if "attr" in directive: + return _expand.read_attr(directive["attr"], package_dir, root_dir) + msg = f"invalid `tool.setuptools.dynamic.{field}`: {directive!r}" + raise ValueError(msg) + return None + self._ensure_previously_set(dist, field) + return None + + def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]): + # Since plugins can set version, let's silently skip if it cannot be obtained + if "version" in self.dynamic and "version" in self.dynamic_cfg: + return _expand.version(self._obtain(dist, "version", package_dir)) + return None + + def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]: + if "readme" not in self.dynamic: + return None + + dynamic_cfg = self.dynamic_cfg + if "readme" in dynamic_cfg: + return { + "text": self._obtain(dist, "readme", {}), + "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"), + } + + self._ensure_previously_set(dist, "readme") + return None + + def _obtain_entry_points( + self, dist: "Distribution", package_dir: Mapping[str, str] + ) -> Optional[Dict[str, dict]]: + fields = ("entry-points", "scripts", "gui-scripts") + if not any(field in self.dynamic for field in fields): + return None + + text = self._obtain(dist, "entry-points", package_dir) + if text is None: + return None + + groups = _expand.entry_points(text) + expanded = {"entry-points": groups} + + def _set_scripts(field: str, group: str): + if group in groups: + value = groups.pop(group) + if field not in self.dynamic: + msg = _WouldIgnoreField.message(field, value) + warnings.warn(msg, _WouldIgnoreField) + # TODO: Don't set field when support for pyproject.toml stabilizes + # instead raise an error as specified in PEP 621 + expanded[field] = value + + _set_scripts("scripts", "console_scripts") + _set_scripts("gui-scripts", "gui_scripts") + + return expanded + + def _obtain_classifiers(self, dist: "Distribution"): + if "classifiers" in self.dynamic: + value = self._obtain(dist, "classifiers", {}) + if value: + return value.splitlines() + return None + + +@contextmanager +def _ignore_errors(ignore_option_errors: bool): + if not ignore_option_errors: + yield + return + + try: + yield + except Exception 
as ex: + _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}") + + +class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered): + def __init__(self, distribution: "Distribution", setuptools_cfg: dict): + super().__init__(distribution) + self._setuptools_cfg = setuptools_cfg + + def __enter__(self): + """When entering the context, the values of ``packages``, ``py_modules`` and + ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``. + """ + dist, cfg = self._dist, self._setuptools_cfg + package_dir: Dict[str, str] = cfg.setdefault("package-dir", {}) + package_dir.update(dist.package_dir or {}) + dist.package_dir = package_dir # needs to be the same object + + dist.set_defaults._ignore_ext_modules() # pyproject.toml-specific behaviour + + # Set `py_modules` and `packages` in dist to short-circuit auto-discovery, + # but avoid overwriting empty lists purposefully set by users. + if dist.py_modules is None: + dist.py_modules = cfg.get("py-modules") + if dist.packages is None: + dist.packages = cfg.get("packages") + + return super().__enter__() + + def __exit__(self, exc_type, exc_value, traceback): + """When exiting the context, if values of ``packages``, ``py_modules`` and + ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``. + """ + # If anything was discovered set them back, so they count in the final config. + self._setuptools_cfg.setdefault("packages", self._dist.packages) + self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules) + return super().__exit__(exc_type, exc_value, traceback) + + +class _ExperimentalProjectMetadata(UserWarning): + """Explicitly inform users that `pyproject.toml` configuration is experimental""" + + +class _InvalidFile(UserWarning): + """Inform users that the given `pyproject.toml` is experimental: + !!\n\n + ############################ + # Invalid `pyproject.toml` # + ############################ + + Any configurations in `pyproject.toml` will be ignored. + Please note that future releases of setuptools will halt the build process + if an invalid file is given. + + To prevent setuptools from considering `pyproject.toml` please + DO NOT include the `[project]` or `[tool.setuptools]` tables in your file. + \n\n!! + """ + + @classmethod + def message(cls): + from inspect import cleandoc + msg = "\n".join(cls.__doc__.splitlines()[1:]) + return cleandoc(msg) diff --git a/setuptools/config.py b/setuptools/config/setupcfg.py index b4e968e5..d485a8bb 100644 --- a/setuptools/config.py +++ b/setuptools/config/setupcfg.py @@ -1,63 +1,40 @@ -import ast -import io +"""Load setuptools configuration from ``setup.cfg`` files""" import os -import sys import warnings import functools -import importlib from collections import defaultdict from functools import partial from functools import wraps -from glob import iglob -import contextlib +from typing import (TYPE_CHECKING, Callable, Any, Dict, Generic, Iterable, List, + Optional, Tuple, TypeVar, Union) from distutils.errors import DistutilsOptionError, DistutilsFileError from setuptools.extern.packaging.version import Version, InvalidVersion from setuptools.extern.packaging.specifiers import SpecifierSet +from . 
import expand -class StaticModule: - """ - Attempt to load the module by the name - """ - - def __init__(self, name): - spec = importlib.util.find_spec(name) - with open(spec.origin) as strm: - src = strm.read() - module = ast.parse(src) - vars(self).update(locals()) - del self.self - - def __getattr__(self, attr): - try: - return next( - ast.literal_eval(statement.value) - for statement in self.module.body - if isinstance(statement, ast.Assign) - for target in statement.targets - if isinstance(target, ast.Name) and target.id == attr - ) - except Exception as e: - raise AttributeError( - "{self.name} has no attribute {attr}".format(**locals()) - ) from e +if TYPE_CHECKING: + from setuptools.dist import Distribution # noqa + from distutils.dist import DistributionMetadata # noqa - -@contextlib.contextmanager -def patch_path(path): - """ - Add path to front of sys.path for the duration of the context. - """ - try: - sys.path.insert(0, path) - yield - finally: - sys.path.remove(path) +_Path = Union[str, os.PathLike] +SingleCommandOptions = Dict["str", Tuple["str", Any]] +"""Dict that associate the name of the options of a particular command to a +tuple. The first element of the tuple indicates the origin of the option value +(e.g. the name of the configuration file where it was read from), +while the second element of the tuple is the option value itself +""" +AllCommandOptions = Dict["str", SingleCommandOptions] # cmd name => its options +Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"]) -def read_configuration(filepath, find_others=False, ignore_option_errors=False): +def read_configuration( + filepath: _Path, + find_others=False, + ignore_option_errors=False +) -> dict: """Read given configuration file and returns options from it as a dict. :param str|unicode filepath: Path to configuration file @@ -73,7 +50,30 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False): :rtype: dict """ - from setuptools.dist import Distribution, _Distribution + from setuptools.dist import Distribution + + dist = Distribution() + filenames = dist.find_config_files() if find_others else [] + handlers = _apply(dist, filepath, filenames, ignore_option_errors) + return configuration_to_dict(handlers) + + +def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution": + """Apply the configuration from a ``setup.cfg`` file into an existing + distribution object. 
+ """ + _apply(dist, filepath) + dist._finalize_requires() + return dist + + +def _apply( + dist: "Distribution", filepath: _Path, + other_files: Iterable[_Path] = (), + ignore_option_errors: bool = False, +) -> Tuple["ConfigHandler", ...]: + """Read configuration from ``filepath`` and applies to the ``dist`` object.""" + from setuptools.dist import _Distribution filepath = os.path.abspath(filepath) @@ -82,27 +82,21 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False): current_directory = os.getcwd() os.chdir(os.path.dirname(filepath)) + filenames = [*other_files, filepath] try: - dist = Distribution() - - filenames = dist.find_config_files() if find_others else [] - if filepath not in filenames: - filenames.append(filepath) - _Distribution.parse_config_files(dist, filenames=filenames) - handlers = parse_configuration( dist, dist.command_options, ignore_option_errors=ignore_option_errors ) - + dist._finalize_license_files() finally: os.chdir(current_directory) - return configuration_to_dict(handlers) + return handlers -def _get_option(target_obj, key): +def _get_option(target_obj: Target, key: str): """ Given a target object and option key, get that option from the target object, either through a get_{key} method or @@ -114,7 +108,7 @@ def _get_option(target_obj, key): return getter() -def configuration_to_dict(handlers): +def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict: """Returns configuration data gathered by given handlers as a dict. :param list[ConfigHandler] handlers: Handlers list, @@ -122,7 +116,7 @@ def configuration_to_dict(handlers): :rtype: dict """ - config_dict = defaultdict(dict) + config_dict: dict = defaultdict(dict) for handler in handlers: for option in handler.set_options: @@ -132,7 +126,11 @@ def configuration_to_dict(handlers): return config_dict -def parse_configuration(distribution, command_options, ignore_option_errors=False): +def parse_configuration( + distribution: "Distribution", + command_options: AllCommandOptions, + ignore_option_errors=False +) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]: """Performs additional parsing of configuration options for a distribution. @@ -146,38 +144,55 @@ def parse_configuration(distribution, command_options, ignore_option_errors=Fals If False exceptions are propagated as expected. :rtype: list """ - options = ConfigOptionsHandler(distribution, command_options, ignore_option_errors) - options.parse() + with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered: + options = ConfigOptionsHandler( + distribution, + command_options, + ignore_option_errors, + ensure_discovered, + ) - meta = ConfigMetadataHandler( - distribution.metadata, - command_options, - ignore_option_errors, - distribution.package_dir, - ) - meta.parse() + options.parse() + if not distribution.package_dir: + distribution.package_dir = options.package_dir # Filled by `find_packages` + + meta = ConfigMetadataHandler( + distribution.metadata, + command_options, + ignore_option_errors, + ensure_discovered, + distribution.package_dir, + distribution.src_root, + ) + meta.parse() return meta, options -class ConfigHandler: +class ConfigHandler(Generic[Target]): """Handles metadata supplied in configuration files.""" - section_prefix = None + section_prefix: str """Prefix for config sections handled by this handler. Must be provided by class heirs. """ - aliases = {} + aliases: Dict[str, str] = {} """Options aliases. For compatibility with various packages. E.g.: d2to1 and pbr. 
Note: `-` in keys is replaced with `_` by config parser. """ - def __init__(self, target_obj, options, ignore_option_errors=False): - sections = {} + def __init__( + self, + target_obj: Target, + options: AllCommandOptions, + ignore_option_errors, + ensure_discovered: expand.EnsurePackagesDiscovered, + ): + sections: AllCommandOptions = {} section_prefix = self.section_prefix for section_name, section_options in options.items(): @@ -190,7 +205,8 @@ class ConfigHandler: self.ignore_option_errors = ignore_option_errors self.target_obj = target_obj self.sections = sections - self.set_options = [] + self.set_options: List[str] = [] + self.ensure_discovered = ensure_discovered @property def parsers(self): @@ -258,34 +274,6 @@ class ConfigHandler: return [chunk.strip() for chunk in value if chunk.strip()] @classmethod - def _parse_list_glob(cls, value, separator=','): - """Equivalent to _parse_list() but expands any glob patterns using glob(). - - However, unlike with glob() calls, the results remain relative paths. - - :param value: - :param separator: List items separator character. - :rtype: list - """ - glob_characters = ('*', '?', '[', ']', '{', '}') - values = cls._parse_list(value, separator=separator) - expanded_values = [] - for value in values: - - # Has globby characters? - if any(char in value for char in glob_characters): - # then expand the glob pattern while keeping paths *relative*: - expanded_values.extend(sorted( - os.path.relpath(path, os.getcwd()) - for path in iglob(os.path.abspath(value)))) - - else: - # take the value as-is: - expanded_values.append(value) - - return expanded_values - - @classmethod def _parse_dict(cls, value): """Represents value as a dict. @@ -338,7 +326,7 @@ class ConfigHandler: return parser @classmethod - def _parse_file(cls, value): + def _parse_file(cls, value, root_dir: _Path): """Represents value as a string, allowing including text from nearest files using `file:` directive. @@ -360,25 +348,10 @@ class ConfigHandler: return value spec = value[len(include_directive) :] - filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) - return '\n'.join( - cls._read_file(path) - for path in filepaths - if (cls._assert_local(path) or True) and os.path.isfile(path) - ) + filepaths = (path.strip() for path in spec.split(',')) + return expand.read_files(filepaths, root_dir) - @staticmethod - def _assert_local(filepath): - if not filepath.startswith(os.getcwd()): - raise DistutilsOptionError('`file:` directive can not access %s' % filepath) - - @staticmethod - def _read_file(filepath): - with io.open(filepath, encoding='utf-8') as f: - return f.read() - - @classmethod - def _parse_attr(cls, value, package_dir=None): + def _parse_attr(self, value, package_dir, root_dir: _Path): """Represents value as a module attribute. 
Examples: @@ -392,36 +365,11 @@ class ConfigHandler: if not value.startswith(attr_directive): return value - attrs_path = value.replace(attr_directive, '').strip().split('.') - attr_name = attrs_path.pop() - - module_name = '.'.join(attrs_path) - module_name = module_name or '__init__' - - parent_path = os.getcwd() - if package_dir: - if attrs_path[0] in package_dir: - # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] - parts = custom_path.rsplit('/', 1) - if len(parts) > 1: - parent_path = os.path.join(os.getcwd(), parts[0]) - module_name = parts[1] - else: - module_name = custom_path - elif '' in package_dir: - # A custom parent directory was specified for all root modules - parent_path = os.path.join(os.getcwd(), package_dir['']) - - with patch_path(parent_path): - try: - # attempt to load value statically - return getattr(StaticModule(module_name), attr_name) - except Exception: - # fallback to simple import - module = importlib.import_module(module_name) + attr_desc = value.replace(attr_directive, '') - return getattr(module, attr_name) + # Make sure package_dir is populated correctly, so `attr:` directives can work + package_dir.update(self.ensure_discovered.package_dir) + return expand.read_attr(attr_desc, package_dir, root_dir) @classmethod def _get_parser_compound(cls, *parse_methods): @@ -482,7 +430,7 @@ class ConfigHandler: if section_name: # [section.option] variant method_postfix = '_%s' % section_name - section_parser_method = getattr( + section_parser_method: Optional[Callable] = getattr( self, # Dots in section names are translated into dunderscores. ('parse_section%s' % method_postfix).replace('.', '__'), @@ -513,7 +461,7 @@ class ConfigHandler: return config_handler -class ConfigMetadataHandler(ConfigHandler): +class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]): section_prefix = 'metadata' @@ -531,18 +479,23 @@ class ConfigMetadataHandler(ConfigHandler): """ def __init__( - self, target_obj, options, ignore_option_errors=False, package_dir=None + self, + target_obj: "DistributionMetadata", + options: AllCommandOptions, + ignore_option_errors: bool, + ensure_discovered: expand.EnsurePackagesDiscovered, + package_dir: Optional[dict] = None, + root_dir: _Path = os.curdir ): - super(ConfigMetadataHandler, self).__init__( - target_obj, options, ignore_option_errors - ) + super().__init__(target_obj, options, ignore_option_errors, ensure_discovered) self.package_dir = package_dir + self.root_dir = root_dir @property def parsers(self): """Metadata item name to parser function mapping.""" parse_list = self._parse_list - parse_file = self._parse_file + parse_file = partial(self._parse_file, root_dir=self.root_dir) parse_dict = self._parse_dict exclude_files_parser = self._exclude_files_parser @@ -579,7 +532,7 @@ class ConfigMetadataHandler(ConfigHandler): :rtype: str """ - version = self._parse_file(value) + version = self._parse_file(value, self.root_dir) if version != value: version = version.strip() @@ -596,24 +549,24 @@ class ConfigMetadataHandler(ConfigHandler): return version - version = self._parse_attr(value, self.package_dir) - - if callable(version): - version = version() - - if not isinstance(version, str): - if hasattr(version, '__iter__'): - version = '.'.join(map(str, version)) - else: - version = '%s' % version + return expand.version(self._parse_attr(value, self.package_dir, self.root_dir)) - return version - -class ConfigOptionsHandler(ConfigHandler): +class 
ConfigOptionsHandler(ConfigHandler["Distribution"]): section_prefix = 'options' + def __init__( + self, + target_obj: "Distribution", + options: AllCommandOptions, + ignore_option_errors: bool, + ensure_discovered: expand.EnsurePackagesDiscovered, + ): + super().__init__(target_obj, options, ignore_option_errors, ensure_discovered) + self.root_dir = target_obj.src_root + self.package_dir: Dict[str, str] = {} # To be filled by `find_packages` + @property def parsers(self): """Metadata item name to parser function mapping.""" @@ -622,6 +575,7 @@ class ConfigOptionsHandler(ConfigHandler): parse_bool = self._parse_bool parse_dict = self._parse_dict parse_cmdclass = self._parse_cmdclass + parse_file = partial(self._parse_file, root_dir=self.root_dir) return { 'zip_safe': parse_bool, @@ -635,23 +589,15 @@ class ConfigOptionsHandler(ConfigHandler): 'setup_requires': parse_list_semicolon, 'tests_require': parse_list_semicolon, 'packages': self._parse_packages, - 'entry_points': self._parse_file, + 'entry_points': parse_file, 'py_modules': parse_list, 'python_requires': SpecifierSet, 'cmdclass': parse_cmdclass, } def _parse_cmdclass(self, value): - def resolve_class(qualified_class_name): - idx = qualified_class_name.rfind('.') - class_name = qualified_class_name[idx + 1 :] - pkg_name = qualified_class_name[:idx] - - module = __import__(pkg_name) - - return getattr(module, class_name) - - return {k: resolve_class(v) for k, v in self._parse_dict(value).items()} + package_dir = self.ensure_discovered.package_dir + return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir) def _parse_packages(self, value): """Parses `packages` option value. @@ -665,19 +611,18 @@ class ConfigOptionsHandler(ConfigHandler): if trimmed_value not in find_directives: return self._parse_list(value) - findns = trimmed_value == find_directives[1] - # Read function arguments from a dedicated section. find_kwargs = self.parse_section_packages__find( self.sections.get('packages.find', {}) ) - if findns: - from setuptools import find_namespace_packages as find_packages - else: - from setuptools import find_packages + find_kwargs.update( + namespaces=(trimmed_value == find_directives[1]), + root_dir=self.root_dir, + fill_package_dir=self.package_dir, + ) - return find_packages(**find_kwargs) + return expand.find_packages(**find_kwargs) def parse_section_packages__find(self, section_options): """Parses `packages.find` configuration file section. @@ -709,14 +654,8 @@ class ConfigOptionsHandler(ConfigHandler): self['entry_points'] = parsed def _parse_package_data(self, section_options): - parsed = self._parse_section_to_dict(section_options, self._parse_list) - - root = parsed.get('*') - if root: - parsed[''] = root - del parsed['*'] - - return parsed + package_data = self._parse_section_to_dict(section_options, self._parse_list) + return expand.canonic_package_data(package_data) def parse_section_package_data(self, section_options): """Parses `package_data` configuration file section. @@ -738,14 +677,13 @@ class ConfigOptionsHandler(ConfigHandler): :param dict section_options: """ parse_list = partial(self._parse_list, separator=';') - self['extras_require'] = self._parse_section_to_dict( - section_options, parse_list - ) + parsed = self._parse_section_to_dict(section_options, parse_list) + self['extras_require'] = parsed def parse_section_data_files(self, section_options): """Parses `data_files` configuration file section. 
        :param dict section_options:
        """
-        parsed = self._parse_section_to_dict(section_options, self._parse_list_glob)
-        self['data_files'] = [(k, v) for k, v in parsed.items()]
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 8be6928a..adffd12d 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -2,7 +2,8 @@ import sys
 import marshal
 import contextlib
 import dis
-from distutils.version import StrictVersion
+
+from setuptools.extern.packaging import version
 
 from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
 from . import _imp
@@ -21,7 +22,7 @@ class Require:
             attribute=None, format=None):
 
         if format is None and requested_version is not None:
-            format = StrictVersion
+            format = version.Version
 
         if format is not None:
             requested_version = format(requested_version)
@@ -40,7 +41,7 @@ class Require:
     def version_ok(self, version):
         """Is 'version' sufficiently up-to-date?"""
         return self.attribute is None or self.format is None or \
-            str(version) != "unknown" and version >= self.requested_version
+            str(version) != "unknown" and self.format(version) >= self.requested_version
 
     def get_version(self, paths=None, default="unknown"):
         """Get version number of installed module, 'None', or 'default'
@@ -78,7 +79,7 @@ class Require:
         version = self.get_version(paths)
         if version is None:
             return False
-        return self.version_ok(version)
+        return self.version_ok(str(version))
 
 
 def maybe_close(f):
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
new file mode 100644
index 00000000..95c3c7f8
--- /dev/null
+++ b/setuptools/discovery.py
@@ -0,0 +1,588 @@
+"""Automatic discovery of Python modules and packages (for inclusion in the
+distribution) and other config values.
+
+For the purposes of this module, the following nomenclature is used:
+
+- "src-layout": a directory representing a Python project that contains a "src"
+  folder. Everything under the "src" folder is meant to be included in the
+  distribution when packaging the project. Example::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── src/
+        └── mypkg/
+            ├── __init__.py
+            ├── mymodule.py
+            └── my_data_file.txt
+
+- "flat-layout": a Python project that does not use "src-layout" but instead
+  has a directory under the project root for each package::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mypkg/
+        ├── __init__.py
+        ├── mymodule.py
+        └── my_data_file.txt
+
+- "single-module": a project that contains a single Python script directly under
+  the project root (no directory used)::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mymodule.py
+
+"""
+
+import itertools
+import os
+from fnmatch import fnmatchcase
+from glob import glob
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Callable, Dict, Iterator, Iterable, List, Optional, Tuple, Union
+
+import _distutils_hack.override  # noqa: F401
+
+from distutils import log
+from distutils.util import convert_path
+
+_Path = Union[str, os.PathLike]
+_Filter = Callable[[str], bool]
+StrIter = Iterator[str]
+
+chain_iter = itertools.chain.from_iterable
+
+if TYPE_CHECKING:
+    from setuptools import Distribution  # noqa
+
+
+def _valid_name(path: _Path) -> bool:
+    # Ignore invalid names that cannot be imported directly
+    return os.path.basename(path).isidentifier()
+
+
+class _Finder:
+    """Base class that exposes functionality for module/package finders"""
+
+    ALWAYS_EXCLUDE: Tuple[str, ...]
= () + DEFAULT_EXCLUDE: Tuple[str, ...] = () + + @classmethod + def find( + cls, + where: _Path = '.', + exclude: Iterable[str] = (), + include: Iterable[str] = ('*',) + ) -> List[str]: + """Return a list of all Python items (packages or modules, depending on + the finder implementation) found within directory 'where'. + + 'where' is the root directory which will be searched. + It should be supplied as a "cross-platform" (i.e. URL-style) path; + it will be converted to the appropriate local path syntax. + + 'exclude' is a sequence of names to exclude; '*' can be used + as a wildcard in the names. + When finding packages, 'foo.*' will exclude all subpackages of 'foo' + (but not 'foo' itself). + + 'include' is a sequence of names to include. + If it's specified, only the named items will be included. + If it's not specified, all found items will be included. + 'include' can contain shell style wildcard patterns just like + 'exclude'. + """ + + exclude = exclude or cls.DEFAULT_EXCLUDE + return list( + cls._find_iter( + convert_path(str(where)), + cls._build_filter(*cls.ALWAYS_EXCLUDE, *exclude), + cls._build_filter(*include), + ) + ) + + @classmethod + def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + raise NotImplementedError + + @staticmethod + def _build_filter(*patterns: str) -> _Filter: + """ + Given a list of patterns, return a callable that will be true only if + the input matches at least one of the patterns. + """ + return lambda name: any(fnmatchcase(name, pat) for pat in patterns) + + +class PackageFinder(_Finder): + """ + Generate a list of all Python packages found within a directory + """ + + ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__") + + @classmethod + def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + """ + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. + """ + for root, dirs, files in os.walk(str(where), followlinks=True): + # Copy dirs to iterate over it, then empty dirs. + all_dirs = dirs[:] + dirs[:] = [] + + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Skip directory trees that are not valid packages + if '.' in dir or not cls._looks_like_package(full_path, package): + continue + + # Should this package be included? + if include(package) and not exclude(package): + yield package + + # Keep searching subdirectories, as there may be more packages + # down there, even if the parent was excluded. + dirs.append(dir) + + @staticmethod + def _looks_like_package(path: _Path, _package_name: str) -> bool: + """Does a directory look like a package?""" + return os.path.isfile(os.path.join(path, '__init__.py')) + + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(_path: _Path, _package_name: str) -> bool: + return True + + +class ModuleFinder(_Finder): + """Find isolated Python modules. + This function will **not** recurse subdirectories. 
+ """ + + @classmethod + def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + for file in glob(os.path.join(where, "*.py")): + module, _ext = os.path.splitext(os.path.basename(file)) + + if not cls._looks_like_module(module): + continue + + if include(module) and not exclude(module): + yield module + + _looks_like_module = staticmethod(_valid_name) + + +# We have to be extra careful in the case of flat layout to not include files +# and directories not meant for distribution (e.g. tool-related) + + +class FlatLayoutPackageFinder(PEP420PackageFinder): + _EXCLUDE = ( + "ci", + "bin", + "doc", + "docs", + "documentation", + "manpages", + "news", + "changelog", + "test", + "tests", + "unit_test", + "unit_tests", + "example", + "examples", + "scripts", + "tools", + "util", + "utils", + "python", + "build", + "dist", + "venv", + "env", + "requirements", + # ---- Task runners / Build tools ---- + "tasks", # invoke + "fabfile", # fabric + "site_scons", # SCons + # ---- Other tools ---- + "benchmark", + "benchmarks", + "exercise", + "exercises", + # ---- Hidden directories/Private packages ---- + "[._]*", + ) + + DEFAULT_EXCLUDE = tuple(chain_iter((p, f"{p}.*") for p in _EXCLUDE)) + """Reserved package names""" + + @staticmethod + def _looks_like_package(_path: _Path, package_name: str) -> bool: + names = package_name.split('.') + # Consider PEP 561 + root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs") + return root_pkg_is_valid and all(name.isidentifier() for name in names[1:]) + + +class FlatLayoutModuleFinder(ModuleFinder): + DEFAULT_EXCLUDE = ( + "setup", + "conftest", + "test", + "tests", + "example", + "examples", + "build", + # ---- Task runners ---- + "toxfile", + "noxfile", + "pavement", + "dodo", + "tasks", + "fabfile", + # ---- Other tools ---- + "[Ss][Cc]onstruct", # SCons + "conanfile", # Connan: C/C++ build tool + "manage", # Django + "benchmark", + "benchmarks", + "exercise", + "exercises", + # ---- Hidden files/Private modules ---- + "[._]*", + ) + """Reserved top-level module names""" + + +def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]: + nested = PEP420PackageFinder.find(pkg_dir) + return [root_pkg] + [".".join((root_pkg, n)) for n in nested] + + +class ConfigDiscovery: + """Fill-in metadata and options that can be automatically derived + (from other metadata/options, the file system or conventions) + """ + + def __init__(self, distribution: "Distribution"): + self.dist = distribution + self._called = False + self._disabled = False + self._skip_ext_modules = False + + def _disable(self): + """Internal API to disable automatic discovery""" + self._disabled = True + + def _ignore_ext_modules(self): + """Internal API to disregard ext_modules. + + Normally auto-discovery would not be triggered if ``ext_modules`` are set + (this is done for backward compatibility with existing packages relying on + ``setup.py`` or ``setup.cfg``). However, ``setuptools`` can call this function + to ignore given ``ext_modules`` and proceed with the auto-discovery if + ``packages`` and ``py_modules`` are not given (e.g. when using pyproject.toml + metadata). + """ + self._skip_ext_modules = True + + @property + def _root_dir(self) -> _Path: + # The best is to wait until `src_root` is set in dist, before using _root_dir. 
+ return self.dist.src_root or os.curdir + + @property + def _package_dir(self) -> Dict[str, str]: + if self.dist.package_dir is None: + return {} + return self.dist.package_dir + + def __call__(self, force=False, name=True, ignore_ext_modules=False): + """Automatically discover missing configuration fields + and modifies the given ``distribution`` object in-place. + + Note that by default this will only have an effect the first time the + ``ConfigDiscovery`` object is called. + + To repeatedly invoke automatic discovery (e.g. when the project + directory changes), please use ``force=True`` (or create a new + ``ConfigDiscovery`` instance). + """ + if force is False and (self._called or self._disabled): + # Avoid overhead of multiple calls + return + + self._analyse_package_layout(ignore_ext_modules) + if name: + self.analyse_name() # depends on ``packages`` and ``py_modules`` + + self._called = True + + def _explicitly_specified(self, ignore_ext_modules: bool) -> bool: + """``True`` if the user has specified some form of package/module listing""" + ignore_ext_modules = ignore_ext_modules or self._skip_ext_modules + ext_modules = not (self.dist.ext_modules is None or ignore_ext_modules) + return ( + self.dist.packages is not None + or self.dist.py_modules is not None + or ext_modules + or hasattr(self.dist, "configuration") and self.dist.configuration + # ^ Some projects use numpy.distutils.misc_util.Configuration + ) + + def _analyse_package_layout(self, ignore_ext_modules: bool) -> bool: + if self._explicitly_specified(ignore_ext_modules): + # For backward compatibility, just try to find modules/packages + # when nothing is given + return True + + log.debug( + "No `packages` or `py_modules` configuration, performing " + "automatic discovery." + ) + + return ( + self._analyse_explicit_layout() + or self._analyse_src_layout() + # flat-layout is the trickiest for discovery so it should be last + or self._analyse_flat_layout() + ) + + def _analyse_explicit_layout(self) -> bool: + """The user can explicitly give a package layout via ``package_dir``""" + package_dir = self._package_dir.copy() # don't modify directly + package_dir.pop("", None) # This falls under the "src-layout" umbrella + root_dir = self._root_dir + + if not package_dir: + return False + + log.debug(f"`explicit-layout` detected -- analysing {package_dir}") + pkgs = chain_iter( + _find_packages_within(pkg, os.path.join(root_dir, parent_dir)) + for pkg, parent_dir in package_dir.items() + ) + self.dist.packages = list(pkgs) + log.debug(f"discovered packages -- {self.dist.packages}") + return True + + def _analyse_src_layout(self) -> bool: + """Try to find all packages or modules under the ``src`` directory + (or anything pointed by ``package_dir[""]``). + + The "src-layout" is relatively safe for automatic discovery. + We assume that everything within is meant to be included in the + distribution. + + If ``package_dir[""]`` is not given, but the ``src`` directory exists, + this function will set ``package_dir[""] = "src"``. 
+ """ + package_dir = self._package_dir + src_dir = os.path.join(self._root_dir, package_dir.get("", "src")) + if not os.path.isdir(src_dir): + return False + + log.debug(f"`src-layout` detected -- analysing {src_dir}") + package_dir.setdefault("", os.path.basename(src_dir)) + self.dist.package_dir = package_dir # persist eventual modifications + self.dist.packages = PEP420PackageFinder.find(src_dir) + self.dist.py_modules = ModuleFinder.find(src_dir) + log.debug(f"discovered packages -- {self.dist.packages}") + log.debug(f"discovered py_modules -- {self.dist.py_modules}") + return True + + def _analyse_flat_layout(self) -> bool: + """Try to find all packages and modules under the project root. + + Since the ``flat-layout`` is more dangerous in terms of accidentally including + extra files/directories, this function is more conservative and will raise an + error if multiple packages or modules are found. + + This assumes that multi-package dists are uncommon and refuse to support that + use case in order to be able to prevent unintended errors. + """ + log.debug(f"`flat-layout` detected -- analysing {self._root_dir}") + return self._analyse_flat_packages() or self._analyse_flat_modules() + + def _analyse_flat_packages(self) -> bool: + self.dist.packages = FlatLayoutPackageFinder.find(self._root_dir) + top_level = remove_nested_packages(remove_stubs(self.dist.packages)) + log.debug(f"discovered packages -- {self.dist.packages}") + self._ensure_no_accidental_inclusion(top_level, "packages") + return bool(top_level) + + def _analyse_flat_modules(self) -> bool: + self.dist.py_modules = FlatLayoutModuleFinder.find(self._root_dir) + log.debug(f"discovered py_modules -- {self.dist.py_modules}") + self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules") + return bool(self.dist.py_modules) + + def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str): + if len(detected) > 1: + from inspect import cleandoc + from setuptools.errors import PackageDiscoveryError + + msg = f"""Multiple top-level {kind} discovered in a flat-layout: {detected}. + + To avoid accidental inclusion of unwanted files or directories, + setuptools will not proceed with this build. + + If you are trying to create a single distribution with multiple {kind} + on purpose, you should not rely on automatic discovery. + Instead, consider the following options: + + 1. set up custom discovery (`find` directive with `include` or `exclude`) + 2. use a `src-layout` + 3. explicitly set `py_modules` or `packages` with a list of names + + To find more information, look for "package discovery" on setuptools docs. + """ + raise PackageDiscoveryError(cleandoc(msg)) + + def analyse_name(self): + """The packages/modules are the essential contribution of the author. + Therefore the name of the distribution can be derived from them. 
+ """ + if self.dist.metadata.name or self.dist.name: + # get_name() is not reliable (can return "UNKNOWN") + return None + + log.debug("No `name` configuration, performing automatic discovery") + + name = ( + self._find_name_single_package_or_module() + or self._find_name_from_packages() + ) + if name: + self.dist.metadata.name = name + self.dist.name = name + + def _find_name_single_package_or_module(self) -> Optional[str]: + """Exactly one module or package""" + for field in ('packages', 'py_modules'): + items = getattr(self.dist, field, None) or [] + if items and len(items) == 1: + log.debug(f"Single module/package detected, name: {items[0]}") + return items[0] + + return None + + def _find_name_from_packages(self) -> Optional[str]: + """Try to find the root package that is not a PEP 420 namespace""" + if not self.dist.packages: + return None + + packages = remove_stubs(sorted(self.dist.packages, key=len)) + package_dir = self.dist.package_dir or {} + + parent_pkg = find_parent_package(packages, package_dir, self._root_dir) + if parent_pkg: + log.debug(f"Common parent package detected, name: {parent_pkg}") + return parent_pkg + + log.warn("No parent package detected, impossible to derive `name`") + return None + + +def remove_nested_packages(packages: List[str]) -> List[str]: + """Remove nested packages from a list of packages. + + >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"]) + ['a'] + >>> remove_nested_packages(["a", "b", "c.d", "c.d.e.f", "g.h", "a.a1"]) + ['a', 'b', 'c.d', 'g.h'] + """ + pkgs = sorted(packages, key=len) + top_level = pkgs[:] + size = len(pkgs) + for i, name in enumerate(reversed(pkgs)): + if any(name.startswith(f"{other}.") for other in top_level): + top_level.pop(size - i - 1) + + return top_level + + +def remove_stubs(packages: List[str]) -> List[str]: + """Remove type stubs (:pep:`561`) from a list of packages. + + >>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"]) + ['a', 'a.b', 'b'] + """ + return [pkg for pkg in packages if not pkg.split(".")[0].endswith("-stubs")] + + +def find_parent_package( + packages: List[str], package_dir: Dict[str, str], root_dir: _Path +) -> Optional[str]: + """Find the parent package that is not a namespace.""" + packages = sorted(packages, key=len) + common_ancestors = [] + for i, name in enumerate(packages): + if not all(n.startswith(f"{name}.") for n in packages[i+1:]): + # Since packages are sorted by length, this condition is able + # to find a list of all common ancestors. + # When there is divergence (e.g. multiple root packages) + # the list will be empty + break + common_ancestors.append(name) + + for name in common_ancestors: + pkg_path = find_package_path(name, package_dir, root_dir) + init = os.path.join(pkg_path, "__init__.py") + if os.path.isfile(init): + return name + + return None + + +def find_package_path(name: str, package_dir: Dict[str, str], root_dir: _Path) -> str: + """Given a package name, return the path where it should be found on + disk, considering the ``package_dir`` option. 
+ + >>> path = find_package_path("my.pkg", {"": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested/my/pkg' + + >>> path = find_package_path("my.pkg", {"my": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested/pkg' + + >>> path = find_package_path("my.pkg", {"my.pkg": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested' + + >>> path = find_package_path("other.pkg", {"my.pkg": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './other/pkg' + """ + parts = name.split(".") + for i in range(len(parts), 0, -1): + # Look backwards, the most specific package_dir first + partial_name = ".".join(parts[:i]) + if partial_name in package_dir: + parent = package_dir[partial_name] + return os.path.join(root_dir, parent, *parts[i:]) + + parent = package_dir.get("") or "" + return os.path.join(root_dir, *parent.split("/"), *parts) + + +def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]: + parent_pkgs = remove_nested_packages(packages) + prefix = Path(package_path).parts + return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs} diff --git a/setuptools/dist.py b/setuptools/dist.py index 848d6b0f..5507167d 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -19,17 +19,20 @@ from glob import iglob import itertools import textwrap from typing import List, Optional, TYPE_CHECKING +from pathlib import Path from collections import defaultdict from email import message_from_file from distutils.errors import DistutilsOptionError, DistutilsSetupError from distutils.util import rfc822_escape -from distutils.version import StrictVersion from setuptools.extern import packaging from setuptools.extern import ordered_set -from setuptools.extern.more_itertools import unique_everseen +from setuptools.extern.more_itertools import unique_everseen, partition +from setuptools.extern import nspektr + +from ._importlib import metadata from . import SetuptoolsDeprecationWarning @@ -37,8 +40,13 @@ import setuptools import setuptools.command from setuptools import windows_support from setuptools.monkey import get_unpatched -from setuptools.config import parse_configuration +from setuptools.config import setupcfg, pyprojecttoml +from setuptools.discovery import ConfigDiscovery + import pkg_resources +from setuptools.extern.packaging import version +from . import _reqs +from . 
import _entry_points if TYPE_CHECKING: from email.message import Message @@ -55,7 +63,7 @@ def _get_unpatched(cls): def get_metadata_version(self): mv = getattr(self, 'metadata_version', None) if mv is None: - mv = StrictVersion('2.1') + mv = version.Version('2.1') self.metadata_version = mv return mv @@ -94,7 +102,7 @@ def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]: def _read_payload_from_msg(msg: "Message") -> Optional[str]: value = msg.get_payload().strip() - if value == 'UNKNOWN': + if value == 'UNKNOWN' or not value: return None return value @@ -103,7 +111,7 @@ def read_pkg_file(self, file): """Reads the metadata values from a file object.""" msg = message_from_file(file) - self.metadata_version = StrictVersion(msg['metadata-version']) + self.metadata_version = version.Version(msg['metadata-version']) self.name = _read_field_from_msg(msg, 'name') self.version = _read_field_from_msg(msg, 'version') self.description = _read_field_from_msg(msg, 'summary') @@ -113,15 +121,14 @@ def read_pkg_file(self, file): self.author_email = _read_field_from_msg(msg, 'author-email') self.maintainer_email = None self.url = _read_field_from_msg(msg, 'home-page') + self.download_url = _read_field_from_msg(msg, 'download-url') self.license = _read_field_unescaped_from_msg(msg, 'license') - if 'download-url' in msg: - self.download_url = _read_field_from_msg(msg, 'download-url') - else: - self.download_url = None - self.long_description = _read_field_unescaped_from_msg(msg, 'description') - if self.long_description is None and self.metadata_version >= StrictVersion('2.1'): + if ( + self.long_description is None and + self.metadata_version >= version.Version('2.1') + ): self.long_description = _read_payload_from_msg(msg) self.description = _read_field_from_msg(msg, 'summary') @@ -132,7 +139,7 @@ def read_pkg_file(self, file): self.classifiers = _read_list_from_msg(msg, 'classifier') # PEP 314 - these fields only exist in 1.1 - if self.metadata_version == StrictVersion('1.1'): + if self.metadata_version == version.Version('1.1'): self.requires = _read_list_from_msg(msg, 'requires') self.provides = _read_list_from_msg(msg, 'provides') self.obsoletes = _read_list_from_msg(msg, 'obsoletes') @@ -145,11 +152,14 @@ def read_pkg_file(self, file): def single_line(val): - """Validate that the value does not have line breaks.""" - # Ref: https://github.com/pypa/setuptools/issues/1390 + """ + Quick and dirty validation for Summary pypa/setuptools#1390. + """ if '\n' in val: - raise ValueError('Newlines are not allowed') - + # TODO: Replace with `raise ValueError("newlines not allowed")` + # after reviewing #2893. 
+ warnings.warn("newlines not allowed and will break in the future") + val = val.strip().split('\n')[0] return val @@ -164,10 +174,14 @@ def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME write_field('Metadata-Version', str(version)) write_field('Name', self.get_name()) write_field('Version', self.get_version()) - write_field('Summary', single_line(self.get_description())) - write_field('Home-page', self.get_url()) + + summary = self.get_description() + if summary: + write_field('Summary', single_line(summary)) optional_fields = ( + ('Home-page', 'url'), + ('Download-URL', 'download_url'), ('Author', 'author'), ('Author-email', 'author_email'), ('Maintainer', 'maintainer'), @@ -179,10 +193,10 @@ def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME if attr_val is not None: write_field(field, attr_val) - license = rfc822_escape(self.get_license()) - write_field('License', license) - if self.download_url: - write_field('Download-URL', self.download_url) + license = self.get_license() + if license: + write_field('License', rfc822_escape(license)) + for project_url in self.project_urls.items(): write_field('Project-URL', '%s, %s' % project_url) @@ -190,7 +204,8 @@ def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME if keywords: write_field('Keywords', keywords) - for platform in self.get_platforms(): + platforms = self.get_platforms() or [] + for platform in platforms: write_field('Platform', platform) self._write_list(file, 'Classifier', self.get_classifiers()) @@ -213,7 +228,11 @@ def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME self._write_list(file, 'License-File', self.license_files or []) - file.write("\n%s\n\n" % self.get_long_description()) + long_description = self.get_long_description() + if long_description: + file.write("\n%s" % long_description) + if not long_description.endswith("\n"): + file.write("\n") sequence = tuple, list @@ -221,7 +240,7 @@ sequence = tuple, list def check_importable(dist, attr, value): try: - ep = pkg_resources.EntryPoint.parse('x=' + value) + ep = metadata.EntryPoint(value=value, name=None, group=None) assert not ep.extras except (TypeError, ValueError, AttributeError, AssertionError) as e: raise DistutilsSetupError( @@ -279,7 +298,7 @@ def _check_extra(extra, reqs): name, sep, marker = extra.partition(':') if marker and pkg_resources.invalid_marker(marker): raise DistutilsSetupError("Invalid environment marker: " + marker) - list(pkg_resources.parse_requirements(reqs)) + list(_reqs.parse(reqs)) def assert_bool(dist, attr, value): @@ -299,7 +318,7 @@ def invalid_unless_false(dist, attr, value): def check_requirements(dist, attr, value): """Verify that install_requires is a valid requirements list""" try: - list(pkg_resources.parse_requirements(value)) + list(_reqs.parse(value)) if isinstance(value, (dict, set)): raise TypeError("Unordered types are not allowed") except (TypeError, ValueError) as error: @@ -324,8 +343,8 @@ def check_specifier(dist, attr, value): def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError as e: + _entry_points.load(value) + except Exception as e: raise DistutilsSetupError(e) from e @@ -448,7 +467,7 @@ class Distribution(_Distribution): self.patch_missing_pkg_info(attrs) self.dependency_links = attrs.pop('dependency_links', []) self.setup_requires = attrs.pop('setup_requires', []) - for ep in 
pkg_resources.iter_entry_points('distutils.setup_keywords'): + for ep in metadata.entry_points(group='distutils.setup_keywords'): vars(self).setdefault(ep.name, None) _Distribution.__init__( self, @@ -459,6 +478,13 @@ class Distribution(_Distribution): }, ) + # Save the original dependencies before they are processed into the egg format + self._orig_extras_require = {} + self._orig_install_requires = [] + self._tmp_extras_require = defaultdict(ordered_set.OrderedSet) + + self.set_defaults = ConfigDiscovery(self) + self._set_metadata_defaults(attrs) self.metadata.version = self._normalize_version( @@ -466,6 +492,19 @@ class Distribution(_Distribution): ) self._finalize_requires() + def _validate_metadata(self): + required = {"name"} + provided = { + key + for key in vars(self.metadata) + if getattr(self.metadata, key, None) is not None + } + missing = required - provided + + if missing: + msg = f"Required package metadata is missing: {missing}" + raise DistutilsSetupError(msg) + def _set_metadata_defaults(self, attrs): """ Fill-in missing metadata fields not supported by distutils. @@ -516,6 +555,8 @@ class Distribution(_Distribution): self.metadata.python_requires = self.python_requires if getattr(self, 'extras_require', None): + # Save original before it is messed by _convert_extras_requirements + self._orig_extras_require = self._orig_extras_require or self.extras_require for extra in self.extras_require.keys(): # Since this gets called multiple times at points where the # keys have become 'converted' extras, ensure that we are only @@ -524,6 +565,10 @@ class Distribution(_Distribution): if extra: self.metadata.provides_extras.add(extra) + if getattr(self, 'install_requires', None) and not self._orig_install_requires: + # Save original before it is messed by _move_install_requirements_markers + self._orig_install_requires = self.install_requires + self._convert_extras_requirements() self._move_install_requirements_markers() @@ -534,11 +579,12 @@ class Distribution(_Distribution): `"extra:{marker}": ["barbazquux"]`. """ spec_ext_reqs = getattr(self, 'extras_require', None) or {} - self._tmp_extras_require = defaultdict(list) + tmp = defaultdict(ordered_set.OrderedSet) + self._tmp_extras_require = getattr(self, '_tmp_extras_require', tmp) for section, v in spec_ext_reqs.items(): # Do not strip empty sections. 
self._tmp_extras_require[section] - for r in pkg_resources.parse_requirements(v): + for r in _reqs.parse(v): suffix = self._suffix_for(r) self._tmp_extras_require[section + suffix].append(r) @@ -564,7 +610,7 @@ class Distribution(_Distribution): return not req.marker spec_inst_reqs = getattr(self, 'install_requires', None) or () - inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs)) + inst_reqs = list(_reqs.parse(spec_inst_reqs)) simple_reqs = filter(is_simple_req, inst_reqs) complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs) self.install_requires = list(map(str, simple_reqs)) @@ -572,7 +618,8 @@ class Distribution(_Distribution): for r in complex_reqs: self._tmp_extras_require[':' + str(r.marker)].append(r) self.extras_require = dict( - (k, [str(r) for r in map(self._clean_req, v)]) + # list(dict.fromkeys(...)) ensures a list of unique strings + (k, list(dict.fromkeys(str(r) for r in map(self._clean_req, v)))) for k, v in self._tmp_extras_require.items() ) @@ -705,7 +752,10 @@ class Distribution(_Distribution): return opt underscore_opt = opt.replace('-', '_') - commands = distutils.command.__all__ + self._setuptools_commands() + commands = list(itertools.chain( + distutils.command.__all__, + self._setuptools_commands(), + )) if ( not section.startswith('options') and section != 'metadata' @@ -723,9 +773,8 @@ class Distribution(_Distribution): def _setuptools_commands(self): try: - dist = pkg_resources.get_distribution('setuptools') - return list(dist.get_entry_map('distutils.commands')) - except pkg_resources.DistributionNotFound: + return metadata.distribution('setuptools').entry_points.names + except metadata.PackageNotFoundError: # during bootstrapping, distribution doesn't exist return [] @@ -788,23 +837,39 @@ class Distribution(_Distribution): except ValueError as e: raise DistutilsOptionError(e) from e + def _get_project_config_files(self, filenames): + """Add default file and split between INI and TOML""" + tomlfiles = [] + standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml") + if filenames is not None: + parts = partition(lambda f: Path(f).suffix == ".toml", filenames) + filenames = list(parts[0]) # 1st element => predicate is False + tomlfiles = list(parts[1]) # 2nd element => predicate is True + elif standard_project_metadata.exists(): + tomlfiles = [standard_project_metadata] + return filenames, tomlfiles + def parse_config_files(self, filenames=None, ignore_option_errors=False): """Parses configuration files from various levels and loads configuration. 
- """ - self._parse_config_files(filenames=filenames) + inifiles, tomlfiles = self._get_project_config_files(filenames) + + self._parse_config_files(filenames=inifiles) - parse_configuration( + setupcfg.parse_configuration( self, self.command_options, ignore_option_errors=ignore_option_errors ) + for filename in tomlfiles: + pyprojecttoml.apply_configuration(self, filename, ignore_option_errors) + self._finalize_requires() self._finalize_license_files() def fetch_build_eggs(self, requires): """Resolve pre-setup requirements""" resolved_dists = pkg_resources.working_set.resolve( - pkg_resources.parse_requirements(requires), + _reqs.parse(requires), installer=self.fetch_build_egg, replace_conflicting=True, ) @@ -824,7 +889,7 @@ class Distribution(_Distribution): def by_order(hook): return getattr(hook, 'order', 0) - defined = pkg_resources.iter_entry_points(group) + defined = metadata.entry_points(group=group) filtered = itertools.filterfalse(self._removed, defined) loaded = map(lambda e: e.load(), filtered) for ep in sorted(loaded, key=by_order): @@ -845,12 +910,21 @@ class Distribution(_Distribution): return ep.name in removed def _finalize_setup_keywords(self): - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + for ep in metadata.entry_points(group='distutils.setup_keywords'): value = getattr(self, ep.name, None) if value is not None: - ep.require(installer=self.fetch_build_egg) + self._install_dependencies(ep) ep.load()(self, ep.name, value) + def _install_dependencies(self, ep): + """ + Given an entry point, ensure that any declared extras for + its distribution are installed. + """ + for req in nspektr.missing(ep): + # fetch_build_egg expects pkg_resources.Requirement + self.fetch_build_egg(pkg_resources.Requirement(str(req))) + def get_egg_cache_dir(self): egg_cache_dir = os.path.join(os.curdir, '.eggs') if not os.path.exists(egg_cache_dir): @@ -881,27 +955,25 @@ class Distribution(_Distribution): if command in self.cmdclass: return self.cmdclass[command] - eps = pkg_resources.iter_entry_points('distutils.commands', command) + eps = metadata.entry_points(group='distutils.commands', name=command) for ep in eps: - ep.require(installer=self.fetch_build_egg) + self._install_dependencies(ep) self.cmdclass[command] = cmdclass = ep.load() return cmdclass else: return _Distribution.get_command_class(self, command) def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): + for ep in metadata.entry_points(group='distutils.commands'): if ep.name not in self.cmdclass: - # don't require extras as the commands won't be invoked - cmdclass = ep.resolve() + cmdclass = ep.load() self.cmdclass[ep.name] = cmdclass return _Distribution.print_commands(self) def get_command_list(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): + for ep in metadata.entry_points(group='distutils.commands'): if ep.name not in self.cmdclass: - # don't require extras as the commands won't be invoked - cmdclass = ep.resolve() + cmdclass = ep.load() self.cmdclass[ep.name] = cmdclass return _Distribution.get_command_list(self) @@ -1144,6 +1216,13 @@ class Distribution(_Distribution): sys.stdout.detach(), encoding, errors, newline, line_buffering ) + def run_command(self, command): + self.set_defaults() + # Postpone defaults until all explicit configuration is considered + # (setup() args, config files, command line and plugins) + + super().run_command(command) + class DistDeprecationWarning(SetuptoolsDeprecationWarning): """Class for warning about 
deprecations in dist in
    setuptools. Not ignored by default, unlike DeprecationWarning."""
diff --git a/setuptools/errors.py b/setuptools/errors.py
index f4d35a63..ec7fb3b6 100644
--- a/setuptools/errors.py
+++ b/setuptools/errors.py
@@ -4,17 +4,6 @@ Provides exceptions used by setuptools modules.
 """
 
 from distutils import errors as _distutils_errors
-from distutils.errors import DistutilsError
-
-
-class RemovedCommandError(DistutilsError, RuntimeError):
-    """Error used for commands that have been removed in setuptools.
-
-    Since ``setuptools`` is built on ``distutils``, simply removing a command
-    from ``setuptools`` will make the behavior fall back to ``distutils``; this
-    error is raised if a command exists in ``distutils`` but has been actively
-    removed in ``setuptools``.
-    """
 
 
 # Re-export errors from distutils to facilitate the migration to PEP632
@@ -38,3 +27,32 @@ UnknownFileError = _distutils_errors.UnknownFileError
 
 # The root error class in the hierarchy
 BaseError = _distutils_errors.DistutilsError
+
+
+class RemovedCommandError(BaseError, RuntimeError):
+    """Error used for commands that have been removed in setuptools.
+
+    Since ``setuptools`` is built on ``distutils``, simply removing a command
+    from ``setuptools`` will make the behavior fall back to ``distutils``; this
+    error is raised if a command exists in ``distutils`` but has been actively
+    removed in ``setuptools``.
+    """
+
+
+class PackageDiscoveryError(BaseError, RuntimeError):
+    """Impossible to perform automatic discovery of packages and/or modules.
+
+    The current project layout or given discovery options can lead to problems when
+    scanning the project directory.
+
+    Setuptools might also refuse to complete auto-discovery if an error-prone condition
+    is detected (e.g. when a project is organised as a flat-layout but contains
+    multiple directories that can be taken as top-level packages inside a single
+    distribution [*]_). In these situations the users are encouraged to be explicit
+    about which packages to include or to make the discovery parameters more specific.
+
+    .. [*] Since multi-package distributions are uncommon it is very likely that the
+       developers did not intend for all the directories to be packaged, and are just
+       leaving auxiliary code in the repository top-level, such as maintenance-related
+       scripts.
+    """
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 1820722a..f696c9c1 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -34,7 +34,7 @@ class Extension(_Extension):
         # The *args is needed for compatibility as calls may use positional
         # arguments. py_limited_api may be set only via keyword.
self.py_limited_api = kw.pop("py_limited_api", False) - _Extension.__init__(self, name, sources, *args, **kw) + super().__init__(name, sources, *args, **kw) def _convert_pyx_sources_to_lang(self): """ diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index baca1afa..192e55f6 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -69,5 +69,8 @@ class VendorImporter: sys.meta_path.append(self) -names = 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', +names = ( + 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'importlib_metadata', + 'zipp', 'importlib_resources', 'jaraco', 'typing_extensions', 'nspektr', 'tomli', +) VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/setuptools/logging.py b/setuptools/logging.py new file mode 100644 index 00000000..15b57613 --- /dev/null +++ b/setuptools/logging.py @@ -0,0 +1,36 @@ +import sys +import logging +import distutils.log +from . import monkey + + +def _not_warning(record): + return record.levelno < logging.WARNING + + +def configure(): + """ + Configure logging to emit warning and above to stderr + and everything else to stdout. This behavior is provided + for compatibilty with distutils.log but may change in + the future. + """ + err_handler = logging.StreamHandler() + err_handler.setLevel(logging.WARNING) + out_handler = logging.StreamHandler(sys.stdout) + out_handler.addFilter(_not_warning) + handlers = err_handler, out_handler + logging.basicConfig( + format="{message}", style='{', handlers=handlers, level=logging.DEBUG) + monkey.patch_func(set_threshold, distutils.log, 'set_threshold') + + # For some reason `distutils.log` module is getting cached in `distutils.dist` + # and then loaded again when patched, + # implying: id(distutils.log) != id(distutils.dist.log). + # Make sure the same module object is used everywhere: + distutils.dist.log = distutils.log + + +def set_threshold(level): + logging.root.setLevel(level*10) + return set_threshold.unpatched(level) diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 270e7f3c..14881d29 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -285,7 +285,7 @@ class PackageIndex(Environment): self, index_url="https://pypi.org/simple/", hosts=('*',), ca_bundle=None, verify_ssl=True, *args, **kw ): - Environment.__init__(self, *args, **kw) + super().__init__(*args, **kw) self.index_url = index_url + "/" [:not index_url.endswith('/')] self.scanned_urls = {} self.fetched_urls = {} @@ -680,8 +680,7 @@ class PackageIndex(Environment): # Make sure the file has been downloaded to the temp dir. 
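A quick sanity check (illustrative, not part of the changeset) of the level*10 translation used by set_threshold above: the distutils.log constants are exactly one tenth of the stdlib logging levels.

import logging
import distutils.log as _distutils_log

# distutils.log: DEBUG=1, INFO=2, WARN=3, ERROR=4, FATAL=5
# logging:       DEBUG=10, INFO=20, WARNING=30, ERROR=40, CRITICAL=50
assert _distutils_log.DEBUG * 10 == logging.DEBUG
assert _distutils_log.INFO * 10 == logging.INFO
assert _distutils_log.WARN * 10 == logging.WARNING
assert _distutils_log.ERROR * 10 == logging.ERROR
assert _distutils_log.FATAL * 10 == logging.CRITICAL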
if os.path.dirname(filename) != tmpdir: dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): + if not (os.path.exists(dst) and os.path.samefile(filename, dst)): shutil.copy2(filename, dst) filename = dst @@ -1002,7 +1001,7 @@ class PyPIConfig(configparser.RawConfigParser): Load from ~/.pypirc """ defaults = dict.fromkeys(['username', 'password', 'repository'], '') - configparser.RawConfigParser.__init__(self, defaults) + super().__init__(defaults) rc = os.path.join(os.path.expanduser('~'), '.pypirc') if os.path.exists(rc): diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/tests/config/__init__.py diff --git a/setuptools/tests/config/downloads/.gitignore b/setuptools/tests/config/downloads/.gitignore new file mode 100644 index 00000000..df3779fc --- /dev/null +++ b/setuptools/tests/config/downloads/.gitignore @@ -0,0 +1,4 @@ +* +!.gitignore +!__init__.py +!preload.py diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py new file mode 100644 index 00000000..9fb9b14b --- /dev/null +++ b/setuptools/tests/config/downloads/__init__.py @@ -0,0 +1,57 @@ +import re +import time +from pathlib import Path +from urllib.error import HTTPError +from urllib.request import urlopen + +__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"] + + +NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/") +DOWNLOAD_DIR = Path(__file__).parent + + +# ---------------------------------------------------------------------- +# Please update ./preload.py accordingly when modifying this file +# ---------------------------------------------------------------------- + + +def output_file(url: str, download_dir: Path = DOWNLOAD_DIR): + file_name = url.strip() + for part in NAME_REMOVE: + file_name = file_name.replace(part, '').strip().strip('/:').strip() + return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name)) + + +def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5): + path = output_file(url, download_dir) + if path.exists(): + print(f"Skipping {url} (already exists: {path})") + else: + download_dir.mkdir(exist_ok=True, parents=True) + print(f"Downloading {url} to {path}") + try: + download(url, path) + except HTTPError: + time.sleep(wait) # wait a few seconds and try again. + download(url, path) + return path + + +def urls_from_file(list_file: Path): + """``list_file`` should be a text file where each line corresponds to a URL to + download. + """ + print(f"file: {list_file}") + content = list_file.read_text(encoding="utf-8") + return [url for url in content.splitlines() if not url.startswith("#")] + + +def download(url: str, dest: Path): + with urlopen(url) as f: + data = f.read() + + with open(dest, "wb") as f: + f.write(data) + + assert Path(dest).exists() diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py new file mode 100644 index 00000000..64b3f1c8 --- /dev/null +++ b/setuptools/tests/config/downloads/preload.py @@ -0,0 +1,18 @@ +"""This file can be used to preload files needed for testing. + +For example you can use:: + + cd setuptools/tests/config + python -m downloads.preload setupcfg_examples.txt + +to make sure the `setup.cfg` examples are downloaded before starting the tests. +""" +import sys +from pathlib import Path + +from . 
import retrieve_file, urls_from_file + + +if __name__ == "__main__": + urls = urls_from_file(Path(sys.argv[1])) + list(map(retrieve_file, urls)) diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt new file mode 100644 index 00000000..5db35654 --- /dev/null +++ b/setuptools/tests/config/setupcfg_examples.txt @@ -0,0 +1,23 @@ +# ==================================================================== +# Some popular packages that use setup.cfg (and others not so popular) +# Reference: https://hugovk.github.io/top-pypi-packages/ +# ==================================================================== +https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg +https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg +https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg +https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg +https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg +https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg +https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg +https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg +https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg +https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg +https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg +https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg +https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg +https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg +https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg +https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg +https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg +https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg +https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py new file mode 100644 index 00000000..4f541697 --- /dev/null +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -0,0 +1,323 @@ +"""Make sure that applying the configuration from pyproject.toml is equivalent to +applying a similar configuration from setup.cfg + +To run these tests offline, please have a look on ``./downloads/preload.py`` +""" +import io +import re +import tarfile +from pathlib import Path +from unittest.mock import Mock +from zipfile import ZipFile + +import pytest +from ini2toml.api import Translator + +import setuptools # noqa ensure monkey patch to metadata +from setuptools.dist import Distribution +from setuptools.config import setupcfg, pyprojecttoml +from setuptools.config import expand +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter +from setuptools.command.egg_info import write_requirements + +from .downloads import retrieve_file, urls_from_file + + +HERE = Path(__file__).parent +EXAMPLES_FILE = "setupcfg_examples.txt" + + +def makedist(path, 
**attrs): + return Distribution({"src_root": path, **attrs}) + + +@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE)) +@pytest.mark.filterwarnings("ignore") +@pytest.mark.uses_network +def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path): + monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1")) + setupcfg_example = retrieve_file(url) + pyproject_example = Path(tmp_path, "pyproject.toml") + toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg") + pyproject_example.write_text(toml_config) + + dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example) + dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example) + + pkg_info_toml = core_metadata(dist_toml) + pkg_info_cfg = core_metadata(dist_cfg) + assert pkg_info_toml == pkg_info_cfg + + if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)): + assert set(dist_toml.license_files) == set(dist_cfg.license_files) + + if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)): + print(dist_cfg.entry_points) + ep_toml = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_toml.entry_points.items()} + ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_cfg.entry_points.items()} + assert ep_toml == ep_cfg + + if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)): + pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()} + pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()} + assert pkg_data_toml == pkg_data_cfg + + if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)): + data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files} + data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files} + assert data_files_toml == data_files_cfg + + assert set(dist_toml.install_requires) == set(dist_cfg.install_requires) + if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)): + if ( + "testing" in dist_toml.extras_require + and "testing" not in dist_cfg.extras_require + ): + # ini2toml can automatically convert `tests_require` to `testing` extra + dist_toml.extras_require.pop("testing") + extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()} + extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()} + assert extra_req_toml == extra_req_cfg + + +PEP621_EXAMPLE = """\ +[project] +name = "spam" +version = "2020.0.0" +description = "Lovely Spam! Wonderful Spam!" 
+readme = "README.rst" +requires-python = ">=3.8" +license = {file = "LICENSE.txt"} +keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] +authors = [ + {email = "hi@pradyunsg.me"}, + {name = "Tzu-Ping Chung"} +] +maintainers = [ + {name = "Brett Cannon", email = "brett@python.org"} +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" +] + +dependencies = [ + "httpx", + "gidgethub[httpx]>4.0.0", + "django>2.1; os_name != 'nt'", + "django>2.0; os_name == 'nt'" +] + +[project.optional-dependencies] +test = [ + "pytest < 5.0.0", + "pytest-cov[all]" +] + +[project.urls] +homepage = "http://example.com" +documentation = "http://readthedocs.org" +repository = "http://github.com" +changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md" + +[project.scripts] +spam-cli = "spam:main_cli" + +[project.gui-scripts] +spam-gui = "spam:main_gui" + +[project.entry-points."spam.magical"] +tomatoes = "spam:main_tomatoes" +""" + +PEP621_EXAMPLE_SCRIPT = """ +def main_cli(): pass +def main_gui(): pass +def main_tomatoes(): pass +""" + + +def _pep621_example_project(tmp_path, readme="README.rst"): + pyproject = tmp_path / "pyproject.toml" + text = PEP621_EXAMPLE + replacements = {'readme = "README.rst"': f'readme = "{readme}"'} + for orig, subst in replacements.items(): + text = text.replace(orig, subst) + pyproject.write_text(text) + + (tmp_path / readme).write_text("hello world") + (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---") + (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT) + return pyproject + + +def test_pep621_example(tmp_path): + """Make sure the example in PEP 621 works""" + pyproject = _pep621_example_project(tmp_path) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.license == "--- LICENSE stub ---" + assert set(dist.metadata.license_files) == {"LICENSE.txt"} + + +@pytest.mark.parametrize( + "readme, ctype", + [ + ("Readme.txt", "text/plain"), + ("readme.md", "text/markdown"), + ("text.rst", "text/x-rst"), + ] +) +def test_readme_content_type(tmp_path, readme, ctype): + pyproject = _pep621_example_project(tmp_path, readme) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type == ctype + + +def test_undefined_content_type(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README.tex") + with pytest.raises(ValueError, match="Undefined content type for README.tex"): + pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + + +def test_no_explicit_content_type_for_missing_extension(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type is None + + +# TODO: After PEP 639 is accepted, we have to move the license-files +# to the `project` table instead of `tool.setuptools` +def test_license_and_license_files(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + text = pyproject.read_text(encoding="utf-8") + + # Sanity-check + assert 'license = {file = "LICENSE.txt"}' in text + assert "[tool.setuptools]" not in text + + text += '\n[tool.setuptools]\nlicense-files = ["_FILE*"]\n' + pyproject.write_text(text, encoding="utf-8") + (tmp_path / "_FILE.txt").touch() + (tmp_path / "_FILE.rst").touch() + + # Would normally match the `license_files` glob patterns, but we want to exclude it + # by being explicit. 
On the other hand, its contents should be added to `license` + (tmp_path / "LICENSE.txt").write_text("LicenseRef-Proprietary\n", encoding="utf-8") + + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"} + assert dist.metadata.license == "LicenseRef-Proprietary\n" + + +class TestPresetField: + def pyproject(self, tmp_path, dynamic, extra_content=""): + content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n" + if "version" not in dynamic: + content += "version = '42'\n" + file = tmp_path / "pyproject.toml" + file.write_text(content + extra_content, encoding="utf-8") + return file + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): + """For the time being we just warn if the user pre-set values (e.g. via + ``setup.py``) but do not include them in ``dynamic``. + """ + pyproject = self.pyproject(tmp_path, []) + dist = makedist(tmp_path, **{attr: value}) + msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S) + with pytest.warns(_WouldIgnoreField, match=msg): + dist = pyprojecttoml.apply_configuration(dist, pyproject) + + # TODO: Once support for pyproject.toml config stabilizes attr should be None + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", []), + ("extras_require", "optional-dependencies", {}), + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_listed_in_dynamic(self, tmp_path, attr, field, value): + pyproject = self.pyproject(tmp_path, [field]) + dist = makedist(tmp_path, **{attr: value}) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + def test_optional_dependencies_dont_remove_env_markers(self, tmp_path): + """ + Internally setuptools converts dependencies with markers to "extras". + If ``install_requires`` is given by ``setup.py``, we have to ensure that + applying ``optional-dependencies`` does not overwrite the mandatory + dependencies with markers (see #3204). 
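Simplified sketch (for illustration only; the real logic lives in setuptools' requires.txt handling) of how a dependency carrying an environment marker is grouped under a ":<marker>" pseudo-extra, which is what the surrounding test asserts:

install_requires = ['importlib-resources>=3.0.0; python_version < "3.7"']
extras = {}
unconditional = []
for req in install_requires:
    requirement, _, marker = req.partition(";")
    if marker.strip():
        # requirements with a marker are filed under a ":<marker>" key
        extras.setdefault(":" + marker.strip(), []).append(requirement.strip())
    else:
        unconditional.append(requirement.strip())

assert ':python_version < "3.7"' in extras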
+ """ + # If setuptools replace its internal mechanism that uses `requires.txt` + # this test has to be rewritten to adapt accordingly + extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n" + pyproject = self.pyproject(tmp_path, ["dependencies"], extra) + install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"'] + dist = makedist(tmp_path, install_requires=install_req) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + assert "foo" in dist.extras_require + assert ':python_version < "3.7"' in dist.extras_require + egg_info = dist.get_command_obj("egg_info") + write_requirements(egg_info, tmp_path, tmp_path / "requires.txt") + reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8") + assert "importlib-resources" in reqs + assert "bar" in reqs + + +class TestMeta: + def test_example_file_in_sdist(self, setuptools_sdist): + """Meta test to ensure tests can run from sdist""" + with tarfile.open(setuptools_sdist) as tar: + assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames()) + + def test_example_file_not_in_wheel(self, setuptools_wheel): + """Meta test to ensure auxiliary test files are not in wheel""" + with ZipFile(setuptools_wheel) as zipfile: + assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist()) + + +# --- Auxiliary Functions --- + + +def core_metadata(dist) -> str: + with io.StringIO() as buffer: + dist.metadata.write_pkg_file(buffer) + pkg_file_txt = buffer.getvalue() + + skip_prefixes = () + skip_lines = set() + # ---- DIFF NORMALISATION ---- + # PEP 621 is very particular about author/maintainer metadata conversion, so skip + skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:") + # May be redundant with Home-page + skip_prefixes += ("Project-URL: Homepage,", "Home-page:") + # May be missing in original (relying on default) but backfilled in the TOML + skip_prefixes += ("Description-Content-Type:",) + # ini2toml can automatically convert `tests_require` to `testing` extra + skip_lines.add("Provides-Extra: testing") + # Remove empty lines + skip_lines.add("") + + result = [] + for line in pkg_file_txt.splitlines(): + if line.startswith(skip_prefixes) or line in skip_lines: + continue + result.append(line + "\n") + + return "".join(result) diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py new file mode 100644 index 00000000..15053c8f --- /dev/null +++ b/setuptools/tests/config/test_expand.py @@ -0,0 +1,185 @@ +import os + +import pytest + +from distutils.errors import DistutilsOptionError +from setuptools.config import expand +from setuptools.discovery import find_package_path + + +def write_files(files, root_dir): + for file, content in files.items(): + path = root_dir / file + path.parent.mkdir(exist_ok=True, parents=True) + path.write_text(content) + + +def test_glob_relative(tmp_path, monkeypatch): + files = { + "dir1/dir2/dir3/file1.txt", + "dir1/dir2/file2.txt", + "dir1/file3.txt", + "a.ini", + "b.ini", + "dir1/c.ini", + "dir1/dir2/a.ini", + } + + write_files({k: "" for k in files}, tmp_path) + patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"] + monkeypatch.chdir(tmp_path) + assert set(expand.glob_relative(patterns)) == files + # Make sure the same APIs work outside cwd + assert set(expand.glob_relative(patterns, tmp_path)) == files + + +def test_read_files(tmp_path, monkeypatch): + + dir_ = tmp_path / "dir_" + (tmp_path / "_dir").mkdir(exist_ok=True) + (tmp_path / "a.txt").touch() + files = { + "a.txt": "a", + "dir1/b.txt": "b", + 
"dir1/dir2/c.txt": "c" + } + write_files(files, dir_) + + with monkeypatch.context() as m: + m.chdir(dir_) + assert expand.read_files(list(files)) == "a\nb\nc" + + cannot_access_msg = r"Cannot access '.*\.\..a\.txt'" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"]) + + # Make sure the same APIs work outside cwd + assert expand.read_files(list(files), dir_) == "a\nb\nc" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"], dir_) + + +class TestReadAttr: + def test_read_attr(self, tmp_path, monkeypatch): + files = { + "pkg/__init__.py": "", + "pkg/sub/__init__.py": "VERSION = '0.1.1'", + "pkg/sub/mod.py": ( + "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n" + "raise SystemExit(1)" + ), + } + write_files(files, tmp_path) + + with monkeypatch.context() as m: + m.chdir(tmp_path) + # Make sure it can read the attr statically without evaluating the module + assert expand.read_attr('pkg.sub.VERSION') == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}) + + assert values['a'] == 0 + assert values['b'] == {42} + + # Make sure the same APIs work outside cwd + assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path) + assert values['c'] == (0, 1, 1) + + def test_import_order(self, tmp_path): + """ + Sometimes the import machinery will import the parent package of a nested + module, which triggers side-effects and might create problems (see issue #3176) + + ``read_attr`` should bypass these limitations by resolving modules statically + (via ast.literal_eval). + """ + files = { + "src/pkg/__init__.py": "from .main import func\nfrom .about import version", + "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42", + "src/pkg/about.py": "version = '42'", + } + write_files(files, tmp_path) + attr_desc = "pkg.about.version" + package_dir = {"": "src"} + # `import super_complicated_dep` should not run, otherwise the build fails + assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42" + + +@pytest.mark.parametrize( + 'package_dir, file, module, return_value', + [ + ({"": "src"}, "src/pkg/main.py", "pkg.main", 42), + ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13), + ({}, "single_module.py", "single_module", 70), + ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836), + ] +) +def test_resolve_class(tmp_path, package_dir, file, module, return_value): + files = {file: f"class Custom:\n def testing(self): return {return_value}"} + write_files(files, tmp_path) + cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path) + assert cls().testing() == return_value + + +@pytest.mark.parametrize( + 'args, pkgs', + [ + ({"where": ["."], "namespaces": False}, {"pkg", "other"}), + ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}), + ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}), + ({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces` + ] +) +def test_find_packages(tmp_path, args, pkgs): + files = { + "pkg/__init__.py", + "other/__init__.py", + "dir1/dir2/__init__.py", + } + write_files({k: "" for k in files}, tmp_path) + + package_dir = {} + kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args} + where = kwargs.get("where", ["."]) + assert set(expand.find_packages(**kwargs)) == pkgs + for pkg in pkgs: + pkg_path = find_package_path(pkg, package_dir, tmp_path) + assert 
os.path.exists(pkg_path) + + # Make sure the same APIs work outside cwd + where = [ + str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths + for p in args.pop("where", ["."]) + ] + + assert set(expand.find_packages(where=where, **args)) == pkgs + + +@pytest.mark.parametrize( + "files, where, expected_package_dir", + [ + (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}), + (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}), + (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}), + (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}), + ( + ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"], + ["src1", "src2"], + {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"}, + ), + ( + ["src/pkg1/__init__.py", "pkg2/__init__.py"], + ["src", "."], + {"pkg1": "src/pkg1"}, + ), + ], +) +def test_fill_package_dir(tmp_path, files, where, expected_package_dir): + write_files({k: "" for k in files}, tmp_path) + pkg_dir = {} + kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False} + pkgs = expand.find_packages(where=where, **kwargs) + assert set(pkg_dir.items()) == set(expected_package_dir.items()) + for pkg in pkgs: + pkg_path = find_package_path(pkg, pkg_dir, tmp_path) + assert os.path.exists(pkg_path) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py new file mode 100644 index 00000000..200312b5 --- /dev/null +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -0,0 +1,415 @@ +import logging +import re +from configparser import ConfigParser +from inspect import cleandoc + +import pytest +import tomli_w +from path import Path as _Path + +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField +from setuptools.config.pyprojecttoml import ( + read_configuration, + expand_configuration, + apply_configuration, + validate, + _InvalidFile, +) +from setuptools.dist import Distribution +from setuptools.errors import OptionError + + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +EXAMPLE = """ +[project] +name = "myproj" +keywords = ["some", "key", "words"] +dynamic = ["version", "readme"] +requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +dependencies = [ + 'importlib-metadata>=0.12;python_version<"3.8"', + 'importlib-resources>=1.0;python_version<"3.7"', + 'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"', +] + +[project.optional-dependencies] +docs = [ + "sphinx>=3", + "sphinx-argparse>=0.2.5", + "sphinx-rtd-theme>=0.4.3", +] +testing = [ + "pytest>=1", + "coverage>=3,<5", +] + +[project.scripts] +exec = "pkg.__main__:exec" + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +package-dir = {"" = "src"} +zip-safe = true +platforms = ["any"] + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools.cmdclass] +sdist = "pkg.mod.CustomSdist" + +[tool.setuptools.dynamic.version] +attr = "pkg.__version__.VERSION" + +[tool.setuptools.dynamic.readme] +file = ["README.md"] +content-type = "text/markdown" + +[tool.setuptools.package-data] +"*" = ["*.txt"] + +[tool.setuptools.data-files] +"data" = ["_files/*.txt"] + +[tool.distutils.sdist] +formats = "gztar" + +[tool.distutils.bdist_wheel] +universal = true +""" + + +def create_example(path, pkg_root): + pyproject = path / "pyproject.toml" + + files = [ + f"{pkg_root}/pkg/__init__.py", + "_files/file.txt", + ] + if pkg_root != ".": # flat-layout will raise 
error for multi-package dist + # Ensure namespaces are discovered + files.append(f"{pkg_root}/other/nested/__init__.py") + + for file in files: + (path / file).parent.mkdir(exist_ok=True, parents=True) + (path / file).touch() + + pyproject.write_text(EXAMPLE) + (path / "README.md").write_text("hello world") + (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass") + (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)") + (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')") + + +def verify_example(config, path, pkg_root): + pyproject = path / "pyproject.toml" + pyproject.write_text(tomli_w.dumps(config), encoding="utf-8") + expanded = expand_configuration(config, path) + expanded_project = expanded["project"] + assert read_configuration(pyproject, expand=True) == expanded + assert expanded_project["version"] == "3.10" + assert expanded_project["readme"]["text"] == "hello world" + assert "packages" in expanded["tool"]["setuptools"] + if pkg_root == ".": + # Auto-discovery will raise error for multi-package dist + assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"} + else: + assert set(expanded["tool"]["setuptools"]["packages"]) == { + "pkg", + "other", + "other.nested", + } + assert expanded["tool"]["setuptools"]["include-package-data"] is True + assert "" in expanded["tool"]["setuptools"]["package-data"] + assert "*" not in expanded["tool"]["setuptools"]["package-data"] + assert expanded["tool"]["setuptools"]["data-files"] == [ + ("data", ["_files/file.txt"]) + ] + + +def test_read_configuration(tmp_path): + create_example(tmp_path, "src") + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + + verify_example(config, tmp_path, "src") + + +@pytest.mark.parametrize( + "pkg_root, opts", + [ + (".", {}), + ("src", {}), + ("lib", {"packages": {"find": {"where": ["lib"]}}}), + ], +) +def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts): + create_example(tmp_path, pkg_root) + + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + config["tool"]["setuptools"].pop("packages", None) + config["tool"]["setuptools"].pop("package-dir", None) + + config["tool"]["setuptools"].update(opts) + verify_example(config, tmp_path, pkg_root) + + +ENTRY_POINTS = { + "console_scripts": {"a": "mod.a:func"}, + "gui_scripts": {"b": "mod.b:func"}, + "other": {"c": "mod.c:func [extra]"}, +} + + +class TestEntryPoints: + def write_entry_points(self, tmp_path): + entry_points = ConfigParser() + entry_points.read_dict(ENTRY_POINTS) + with open(tmp_path / "entry-points.txt", "w") as f: + entry_points.write(f) + + def pyproject(self, dynamic=None): + project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]} + tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}} + return {"project": project, "tool": {"setuptools": tool}} + + def test_all_listed_in_dynamic(self, tmp_path): + self.write_entry_points(tmp_path) + expanded = expand_configuration(self.pyproject(), tmp_path) + expanded_project = expanded["project"] + assert len(expanded_project["scripts"]) == 1 + assert expanded_project["scripts"]["a"] == "mod.a:func" + assert len(expanded_project["gui-scripts"]) == 1 + assert 
expanded_project["gui-scripts"]["b"] == "mod.b:func" + assert len(expanded_project["entry-points"]) == 1 + assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]" + + @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts")) + def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic): + self.write_entry_points(tmp_path) + dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic} + + msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}" + with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)): + expanded = expand_configuration(self.pyproject(dynamic), tmp_path) + + expanded_project = expanded["project"] + assert dynamic < set(expanded_project) + assert len(expanded_project["entry-points"]) == 1 + # TODO: Test the following when pyproject.toml support stabilizes: + # >>> assert missing_dynamic not in expanded_project + + +class TestClassifiers: + def test_dynamic(self, tmp_path): + # Let's create a project example that has dynamic classifiers + # coming from a txt file. + create_example(tmp_path, "src") + classifiers = """\ + Framework :: Flask + Programming Language :: Haskell + """ + (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers)) + + pyproject = tmp_path / "pyproject.toml" + config = read_configuration(pyproject, expand=False) + dynamic = config["project"]["dynamic"] + config["project"]["dynamic"] = list({*dynamic, "classifiers"}) + dynamic_config = config["tool"]["setuptools"]["dynamic"] + dynamic_config["classifiers"] = {"file": "classifiers.txt"} + + # When the configuration is expanded, + # each line of the file should be an different classifier. + validate(config, pyproject) + expanded = expand_configuration(config, tmp_path) + + assert set(expanded["project"]["classifiers"]) == { + "Framework :: Flask", + "Programming Language :: Haskell", + } + + def test_dynamic_without_config(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["classifiers"] + """ + + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + with pytest.raises(OptionError, match="No configuration .* .classifiers."): + read_configuration(pyproject) + + def test_dynamic_readme_from_setup_script_args(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["readme"] + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + dist = Distribution(attrs={"long_description": "42"}) + # No error should occur because of missing `readme` + dist = apply_configuration(dist, pyproject) + assert dist.metadata.long_description == "42" + + def test_dynamic_without_file(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["classifiers"] + + [tool.setuptools.dynamic] + classifiers = {file = ["classifiers.txt"]} + """ + + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + with pytest.warns(UserWarning, match="File .*classifiers.txt. 
cannot be found"): + expanded = read_configuration(pyproject) + assert "classifiers" not in expanded["project"] + + +@pytest.mark.parametrize( + "example", + ( + """ + [project] + name = "myproj" + version = "1.2" + + [my-tool.that-disrespect.pep518] + value = 42 + """, + ), +) +def test_ignore_unrelated_config(tmp_path, example): + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(example)) + + # Make sure no error is raised due to 3rd party configs in pyproject.toml + assert read_configuration(pyproject) is not None + + +@pytest.mark.parametrize( + "example, error_msg, value_shown_in_debug", + [ + ( + """ + [project] + name = "myproj" + version = "1.2" + requires = ['pywin32; platform_system=="Windows"' ] + """, + "configuration error: `project` must not contain {'requires'} properties", + '"requires": ["pywin32; platform_system==\\"Windows\\""]', + ), + ], +) +def test_invalid_example(tmp_path, caplog, example, error_msg, value_shown_in_debug): + caplog.set_level(logging.DEBUG) + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(example)) + + caplog.clear() + with pytest.raises(ValueError, match="invalid pyproject.toml"): + read_configuration(pyproject) + + # Make sure the logs give guidance to the user + error_log = caplog.record_tuples[0] + assert error_log[1] == logging.ERROR + assert error_msg in error_log[2] + + debug_log = caplog.record_tuples[1] + assert debug_log[1] == logging.DEBUG + debug_msg = "".join(line.strip() for line in debug_log[2].splitlines()) + assert value_shown_in_debug in debug_msg + + +@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42")) +def test_empty(tmp_path, config): + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(config) + + # Make sure no error is raised + assert read_configuration(pyproject) == {} + + +@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",)) +def test_include_package_data_by_default(tmp_path, config): + """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as + default. + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(config) + + config = read_configuration(pyproject) + assert config["tool"]["setuptools"]["include-package-data"] is True + + +def test_include_package_data_in_setuppy(tmp_path): + """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in + ``setup.py``. 
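A self-contained check (an assumption-based sketch, not from the changeset; behaviour inferred from the tests above) that an explicit opt-out survives the include-package-data default:

import pathlib
import tempfile

from setuptools.config.pyprojecttoml import read_configuration

with tempfile.TemporaryDirectory() as tmp:
    pyproject = pathlib.Path(tmp, "pyproject.toml")
    pyproject.write_text(
        "[project]\nname = 'myproj'\nversion = '42'\n"
        "\n[tool.setuptools]\ninclude-package-data = false\n"
    )
    config = read_configuration(pyproject)
    assert config["tool"]["setuptools"]["include-package-data"] is False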
+ + See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889 + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n") + setuppy = tmp_path / "setup.py" + setuppy.write_text("__import__('setuptools').setup(include_package_data=False)") + + with _Path(tmp_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="config") + + assert dist.get_name() == "myproj" + assert dist.get_version() == "42" + assert dist.include_package_data is False + + +class TestSkipBadConfig: + @pytest.mark.parametrize( + "setup_attrs", + [ + {"name": "myproj"}, + {"install_requires": ["does-not-exist"]}, + ], + ) + @pytest.mark.parametrize( + "pyproject_content", + [ + "[project]\nrequires-python = '>=3.7'\n", + "[project]\nversion = '42'\nrequires-python = '>=3.7'\n", + "[project]\nname='othername'\nrequires-python = '>=3.7'\n", + ], + ) + def test_popular_config(self, tmp_path, pyproject_content, setup_attrs): + # See pypa/setuptools#3199 and pypa/cibuildwheel#1064 + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(pyproject_content) + dist = Distribution(attrs=setup_attrs) + + prev_name = dist.get_name() + prev_deps = dist.install_requires + + with pytest.warns(_InvalidFile, match=r"DO NOT include.*\[project\].* table"): + dist = apply_configuration(dist, pyproject) + + assert dist.get_name() != "othername" + assert dist.get_name() == prev_name + assert dist.python_requires is None + assert set(dist.install_requires) == set(prev_deps) diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/test_setupcfg.py index 005742e4..1f35f836 100644 --- a/setuptools/tests/test_config.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -1,21 +1,20 @@ -import types -import sys - -import contextlib import configparser +import contextlib +import inspect +from pathlib import Path +from unittest.mock import Mock, patch import pytest from distutils.errors import DistutilsOptionError, DistutilsFileError -from mock import patch from setuptools.dist import Distribution, _Distribution -from setuptools.config import ConfigHandler, read_configuration -from distutils.core import Command -from .textwrap import DALS +from setuptools.config.setupcfg import ConfigHandler, read_configuration +from ..textwrap import DALS class ErrConfigHandler(ConfigHandler): """Erroneous handler. 
Fails to implement required methods.""" + section_prefix = "**err**" def make_package_dir(name, base_dir, ns=False): @@ -70,7 +69,7 @@ def get_dist(tmpdir, kwargs_initial=None, parse=True): def test_parsers_implemented(): with pytest.raises(NotImplementedError): - handler = ErrConfigHandler(None, {}) + handler = ErrConfigHandler(None, {}, False, Mock()) handler.parsers @@ -186,9 +185,12 @@ class TestMetadata: def test_file_sandboxed(self, tmpdir): - fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n') + tmpdir.ensure("README") + project = tmpdir.join('depth1', 'depth2') + project.ensure(dir=True) + fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n') - with get_dist(tmpdir, parse=False) as dist: + with get_dist(project, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() # file: out of sandbox @@ -859,22 +861,25 @@ class TestOptions: dist.parse_config_files() def test_cmdclass(self, tmpdir): - class CustomCmd(Command): - pass - - m = types.ModuleType('custom_build', 'test package') - - m.__dict__['CustomCmd'] = CustomCmd - - sys.modules['custom_build'] = m - - fake_env( - tmpdir, - '[options]\n' 'cmdclass =\n' ' customcmd = custom_build.CustomCmd\n', + module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src + module_path.parent.mkdir(parents=True, exist_ok=True) + module_path.write_text( + "from distutils.core import Command\n" + "class CustomCmd(Command): pass\n" ) + setup_cfg = """ + [options] + cmdclass = + customcmd = custom_build.CustomCmd + """ + fake_env(tmpdir, inspect.cleandoc(setup_cfg)) + with get_dist(tmpdir) as dist: - assert dist.cmdclass == {'customcmd': CustomCmd} + cmdclass = dist.cmdclass['customcmd'] + assert cmdclass.__name__ == "CustomCmd" + assert cmdclass.__module__ == "custom_build" + assert module_path.samefile(inspect.getfile(cmdclass)) saved_dist_init = _Distribution.__init__ diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py index 51ce8984..58948824 100644 --- a/setuptools/tests/contexts.py +++ b/setuptools/tests/contexts.py @@ -7,6 +7,7 @@ import site import io import pkg_resources +from filelock import FileLock @contextlib.contextmanager @@ -96,3 +97,29 @@ def suppress_exceptions(*excs): yield except excs: pass + + +def multiproc(request): + """ + Return True if running under xdist and multiple + workers are used. 
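The file-lock pattern used by session_locked_tmp_dir below, shown in isolation (the directory name is hypothetical; filelock is already a test dependency):

from pathlib import Path
from filelock import FileLock

shared_dir = Path("shared-build-cache")  # hypothetical shared location
with FileLock(shared_dir.with_suffix(".lock")):
    # only one pytest-xdist worker at a time gets past this point
    shared_dir.mkdir(exist_ok=True, parents=True)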
+ """ + try: + worker_id = request.getfixturevalue('worker_id') + except Exception: + return False + return worker_id != 'master' + + +@contextlib.contextmanager +def session_locked_tmp_dir(request, tmp_path_factory, name): + """Uses a file lock to guarantee only one worker can access a temp dir""" + # get the temp directory shared by all workers + base = tmp_path_factory.getbasetemp() + shared_dir = base.parent if multiproc(request) else base + + locked_dir = shared_dir / name + with FileLock(locked_dir.with_suffix(".lock")): + # ^-- prevent multiple workers to access the directory at once + locked_dir.mkdir(exist_ok=True, parents=True) + yield locked_dir diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py index c0274c33..bcf29601 100644 --- a/setuptools/tests/environment.py +++ b/setuptools/tests/environment.py @@ -1,9 +1,38 @@ import os import sys +import subprocess import unicodedata - from subprocess import Popen as _Popen, PIPE as _PIPE +import jaraco.envs + + +class VirtualEnv(jaraco.envs.VirtualEnv): + name = '.env' + # Some version of PyPy will import distutils on startup, implicitly + # importing setuptools, and thus leading to BackendInvalid errors + # when upgrading Setuptools. Bypass this behavior by avoiding the + # early availability and need to upgrade. + create_opts = ['--no-setuptools'] + + def run(self, cmd, *args, **kwargs): + cmd = [self.exe(cmd[0])] + cmd[1:] + kwargs = {"cwd": self.root, **kwargs} # Allow overriding + # In some environments (eg. downstream distro packaging), where: + # - tox isn't used to run tests and + # - PYTHONPATH is set to point to a specific setuptools codebase and + # - no custom env is explicitly set by a test + # PYTHONPATH will leak into the spawned processes. + # In that case tests look for module in the wrong place (on PYTHONPATH). + # Unless the test sets its own special env, pass a copy of the existing + # environment with removed PYTHONPATH to the subprocesses. + if "env" not in kwargs: + env = dict(os.environ) + if "PYTHONPATH" in env: + del env["PYTHONPATH"] + kwargs["env"] = env + return subprocess.check_output(cmd, *args, **kwargs) + def _which_dirs(cmd): result = set() diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index a5a172e0..25ab49fd 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -1,11 +1,13 @@ +import os import contextlib import sys -import shutil import subprocess +from pathlib import Path import pytest +import path -from . import contexts +from . import contexts, environment @pytest.fixture @@ -28,22 +30,6 @@ def tmpdir_cwd(tmpdir): yield orig -@pytest.fixture -def tmp_src(request, tmp_path): - """Make a copy of the source dir under `$tmp/src`. - - This fixture is useful whenever it's necessary to run `setup.py` - or `pip install` against the source directory when there's no - control over the number of simultaneous invocations. Such - concurrent runs create and delete directories with the same names - under the target directory and so they influence each other's runs - when they are not being executed sequentially. 
- """ - tmp_src_path = tmp_path / 'src' - shutil.copytree(request.config.rootdir, tmp_src_path) - return tmp_src_path - - @pytest.fixture(autouse=True, scope="session") def workaround_xdist_376(request): """ @@ -72,3 +58,83 @@ def sample_project(tmp_path): except Exception: pytest.skip("Unable to clone sampleproject") return tmp_path / 'sampleproject' + + +# sdist and wheel artifacts should be stable across a round of tests +# so we can build them once per session and use the files as "readonly" + + +@pytest.fixture(scope="session") +def setuptools_sdist(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve() + + with contexts.session_locked_tmp_dir( + request, tmp_path_factory, "sdist_build") as tmp: + dist = next(tmp.glob("*.tar.gz"), None) + if dist: + return dist + + subprocess.check_call([ + sys.executable, "-m", "build", "--sdist", + "--outdir", str(tmp), str(request.config.rootdir) + ]) + return next(tmp.glob("*.tar.gz")) + + +@pytest.fixture(scope="session") +def setuptools_wheel(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve() + + with contexts.session_locked_tmp_dir( + request, tmp_path_factory, "wheel_build") as tmp: + dist = next(tmp.glob("*.whl"), None) + if dist: + return dist + + subprocess.check_call([ + sys.executable, "-m", "build", "--wheel", + "--outdir", str(tmp) , str(request.config.rootdir) + ]) + return next(tmp.glob("*.whl")) + + +@pytest.fixture +def venv(tmp_path, setuptools_wheel): + """Virtual env with the version of setuptools under test installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'venv') + env.req = str(setuptools_wheel) + # In some environments (eg. downstream distro packaging), + # where tox isn't used to run tests and PYTHONPATH is set to point to + # a specific setuptools codebase, PYTHONPATH will leak into the spawned + # processes. + # env.create() should install the just created setuptools + # wheel, but it doesn't if it finds another existing matching setuptools + # installation present on PYTHONPATH: + # `setuptools is already installed with the same version as the provided + # wheel. Use --force-reinstall to force an installation of the wheel.` + # This prevents leaking PYTHONPATH to the created environment. + with contexts.environment(PYTHONPATH=None): + return env.create() + + +@pytest.fixture +def venv_without_setuptools(tmp_path): + """Virtual env without any version of setuptools installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'venv_without_setuptools') + env.create_opts = ['--no-setuptools'] + env.ensure_env() + return env + + +@pytest.fixture +def bare_venv(tmp_path): + """Virtual env without any common packages installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'bare_venv') + env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed'] + env.ensure_env() + return env diff --git a/setuptools/tests/integration/__init__.py b/setuptools/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/tests/integration/__init__.py diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py new file mode 100644 index 00000000..24c02be0 --- /dev/null +++ b/setuptools/tests/integration/helpers.py @@ -0,0 +1,75 @@ +"""Reusable functions and classes for different types of integration tests. 
+ +For example ``Archive`` can be used to check the contents of distribution built +with setuptools, and ``run`` will always try to be as verbose as possible to +facilitate debugging. +""" +import os +import subprocess +import tarfile +from zipfile import ZipFile +from pathlib import Path + + +def run(cmd, env=None): + r = subprocess.run( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + env={**os.environ, **(env or {})} + # ^-- allow overwriting instead of discarding the current env + ) + + out = r.stdout + "\n" + r.stderr + # pytest omits stdout/err by default, if the test fails they help debugging + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print(f"Command: {cmd}\nreturn code: {r.returncode}\n\n{out}") + + if r.returncode == 0: + return out + raise subprocess.CalledProcessError(r.returncode, cmd, r.stdout, r.stderr) + + +class Archive: + """Compatibility layer for ZipFile/Info and TarFile/Info""" + def __init__(self, filename): + self._filename = filename + if filename.endswith("tar.gz"): + self._obj = tarfile.open(filename, "r:gz") + elif filename.endswith("zip"): + self._obj = ZipFile(filename) + else: + raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz") + + def __iter__(self): + if hasattr(self._obj, "infolist"): + return iter(self._obj.infolist()) + return iter(self._obj) + + def get_name(self, zip_or_tar_info): + if hasattr(zip_or_tar_info, "filename"): + return zip_or_tar_info.filename + return zip_or_tar_info.name + + def get_content(self, zip_or_tar_info): + if hasattr(self._obj, "extractfile"): + content = self._obj.extractfile(zip_or_tar_info) + if content is None: + msg = f"Invalid {zip_or_tar_info.name} in {self._filename}" + raise ValueError(msg) + return str(content.read(), "utf-8") + return str(self._obj.read(zip_or_tar_info), "utf-8") + + +def get_sdist_members(sdist_path): + with tarfile.open(sdist_path, "r:gz") as tar: + files = [Path(f) for f in tar.getnames()] + # remove root folder + relative_files = ("/".join(f.parts[1:]) for f in files) + return {f for f in relative_files if f} + + +def get_wheel_members(wheel_path): + with ZipFile(wheel_path) as zipfile: + return set(zipfile.namelist()) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py new file mode 100644 index 00000000..9d11047b --- /dev/null +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -0,0 +1,219 @@ +"""Integration tests for setuptools that focus on building packages via pip. + +The idea behind these tests is not to exhaustively check all the possible +combinations of packages, operating systems, supporting libraries, etc, but +rather check a limited number of popular packages and how they interact with +the exposed public API. This way if any change in API is introduced, we hope to +identify backward compatibility problems before publishing a release. + +The number of tested packages is purposefully kept small, to minimise duration +and the associated maintenance cost (changes in the way these packages define +their build process may require changes in the tests). 
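Possible usage of the Archive helper defined above (the sdist path is hypothetical):

from setuptools.tests.integration.helpers import Archive

archive = Archive("downloads/example-1.0.tar.gz")  # a .zip works as well
for member in archive:
    name = archive.get_name(member)
    if name.endswith("pyproject.toml"):
        print(archive.get_content(member))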
+""" +import json +import os +import shutil +import sys +from enum import Enum +from glob import glob +from hashlib import md5 +from urllib.request import urlopen + +import pytest +from packaging.requirements import Requirement + +from .helpers import Archive, run + + +pytestmark = pytest.mark.integration + +LATEST, = list(Enum("v", "LATEST")) +"""Default version to be checked""" +# There are positive and negative aspects of checking the latest version of the +# packages. +# The main positive aspect is that the latest version might have already +# removed the use of APIs deprecated in previous releases of setuptools. + + +# Packages to be tested: +# (Please notice the test environment cannot support EVERY library required for +# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume +# that `build-essential`, `gfortran` and `libopenblas-dev` are installed, +# due to their relevance to the numerical/scientific programming ecosystem) +EXAMPLES = [ + ("pandas", LATEST), # cython + custom build_ext + ("sphinx", LATEST), # custom setup.py + ("pip", LATEST), # just in case... + ("pytest", LATEST), # uses setuptools_scm + ("mypy", LATEST), # custom build_py + ext_modules + + # --- Popular packages: https://hugovk.github.io/top-pypi-packages/ --- + ("botocore", LATEST), + ("kiwisolver", "1.3.2"), # build_ext, version pinned due to setup_requires + ("brotli", LATEST), # not in the list but used by urllib3 + + # When adding packages to this list, make sure they expose a `__version__` + # attribute, or modify the tests below +] + + +# Some packages have "optional" dependencies that modify their build behaviour +# and are not listed in pyproject.toml, others still use `setup_requires` +EXTRA_BUILD_DEPS = { + "sphinx": ("babel>=1.3",), + "kiwisolver": ("cppy>=1.1.0",) +} + + +VIRTUALENV = (sys.executable, "-m", "virtualenv") + + +# By default, pip will try to build packages in isolation (PEP 517), which +# means it will download the previous stable version of setuptools. +# `pip` flags can avoid that (the version of setuptools under test +# should be the one to be used) +SDIST_OPTIONS = ( + "--ignore-installed", + "--no-build-isolation", + # We don't need "--no-binary :all:" since we specify the path to the sdist. + # It also helps with performance, since dependencies can come from wheels. +) +# The downside of `--no-build-isolation` is that pip will not download build +# dependencies. The test script will have to also handle that. + + +@pytest.fixture +def venv_python(tmp_path): + run([*VIRTUALENV, str(tmp_path / ".venv")]) + possible_path = (str(p.parent) for p in tmp_path.glob(".venv/*/python*")) + return shutil.which("python", path=os.pathsep.join(possible_path)) + + +@pytest.fixture(autouse=True) +def _prepare(tmp_path, venv_python, monkeypatch, request): + download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path)) + os.makedirs(download_path, exist_ok=True) + + # Environment vars used for building some of the packages + monkeypatch.setenv("USE_MYPYC", "1") + + def _debug_info(): + # Let's provide the maximum amount of information possible in the case + # it is necessary to debug the tests directly from the CI logs. 
+ print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("Temporary directory:") + map(print, tmp_path.glob("*")) + print("Virtual environment:") + run([venv_python, "-m", "pip", "freeze"]) + request.addfinalizer(_debug_info) + + +ALREADY_LOADED = ("pytest", "mypy") # loaded by pytest/pytest-enabler + + +@pytest.mark.parametrize('package, version', EXAMPLES) +@pytest.mark.uses_network +def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel): + venv_pip = (venv_python, "-m", "pip") + sdist = retrieve_sdist(package, version, tmp_path) + deps = build_deps(package, sdist) + if deps: + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("Dependencies:", deps) + run([*venv_pip, "install", *deps]) + + # Use a virtualenv to simulate PEP 517 isolation + # but install fresh setuptools wheel to ensure the version under development + run([*venv_pip, "install", "-I", setuptools_wheel]) + run([*venv_pip, "install", *SDIST_OPTIONS, sdist]) + + # Execute a simple script to make sure the package was installed correctly + script = f"import {package}; print(getattr({package}, '__version__', 0))" + run([venv_python, "-c", script]) + + +# ---- Helper Functions ---- + + +def retrieve_sdist(package, version, tmp_path): + """Either use cached sdist file or download it from PyPI""" + # `pip download` cannot be used due to + # https://github.com/pypa/pip/issues/1884 + # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686 + # We have to find the correct distribution file and download it + download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path)) + dist = retrieve_pypi_sdist_metadata(package, version) + + # Remove old files to prevent cache to grow indefinitely + for file in glob(os.path.join(download_path, f"{package}*")): + if dist["filename"] != file: + os.unlink(file) + + dist_file = os.path.join(download_path, dist["filename"]) + if not os.path.exists(dist_file): + download(dist["url"], dist_file, dist["md5_digest"]) + return dist_file + + +def retrieve_pypi_sdist_metadata(package, version): + # https://warehouse.pypa.io/api-reference/json.html + id_ = package if version is LATEST else f"{package}/{version}" + with urlopen(f"https://pypi.org/pypi/{id_}/json") as f: + metadata = json.load(f) + + if metadata["info"]["yanked"]: + raise ValueError(f"Release for {package} {version} was yanked") + + version = metadata["info"]["version"] + release = metadata["releases"][version] + dists = [d for d in release if d["packagetype"] == "sdist"] + if len(dists) == 0: + raise ValueError(f"No sdist found for {package} {version}") + + for dist in dists: + if dist["filename"].endswith(".tar.gz"): + return dist + + # Not all packages are publishing tar.gz + return dist + + +def download(url, dest, md5_digest): + with urlopen(url) as f: + data = f.read() + + assert md5(data).hexdigest() == md5_digest + + with open(dest, "wb") as f: + f.write(data) + + assert os.path.exists(dest) + + +def build_deps(package, sdist_file): + """Find out what are the build dependencies for a package. + + We need to "manually" install them, since pip will not install build + deps with `--no-build-isolation`. 
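The PyPI JSON API used by retrieve_pypi_sdist_metadata above can also be queried directly; a minimal sketch (requires network access; the package name is an arbitrary example):

import json
from urllib.request import urlopen

with urlopen("https://pypi.org/pypi/sampleproject/json") as f:
    metadata = json.load(f)

version = metadata["info"]["version"]
sdists = [d for d in metadata["releases"][version] if d["packagetype"] == "sdist"]
print(sdists[0]["filename"], sdists[0]["url"])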
+ """ + import tomli as toml + + # delay importing, since pytest discovery phase may hit this file from a + # testenv without tomli + + archive = Archive(sdist_file) + pyproject = _read_pyproject(archive) + + info = toml.loads(pyproject) + deps = info.get("build-system", {}).get("requires", []) + deps += EXTRA_BUILD_DEPS.get(package, []) + # Remove setuptools from requirements (and deduplicate) + requirements = {Requirement(d).name: d for d in deps} + return [v for k, v in requirements.items() if k != "setuptools"] + + +def _read_pyproject(archive): + for member in archive: + if os.path.basename(archive.get_name(member)) == "pyproject.toml": + return archive.get_content(member) + return "" diff --git a/setuptools/tests/requirements.txt b/setuptools/tests/requirements.txt deleted file mode 100644 index b2d84a94..00000000 --- a/setuptools/tests/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -mock -pytest-flake8 -flake8-2020; python_version>="3.6" -virtualenv>=13.0.0 -pytest-virtualenv>=1.2.7 -pytest>=3.7 -wheel -coverage>=4.5.1 -pytest-cov>=2.5.1 -paver; python_version>="3.6" -futures; python_version=="2.7" -pip>=19.1 # For proper file:// URLs support. -jaraco.envs -sphinx diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py index 28482fd0..1a900c67 100644 --- a/setuptools/tests/test_bdist_deprecations.py +++ b/setuptools/tests/test_bdist_deprecations.py @@ -11,7 +11,7 @@ from setuptools import SetuptoolsDeprecationWarning @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') @mock.patch('distutils.command.bdist_rpm.bdist_rpm') -def test_bdist_rpm_warning(distutils_cmd): +def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd): dist = Distribution( dict( script_name='setup.py', diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index fb5b90b1..67f788cc 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -13,7 +13,7 @@ from . 
import contexts SETUP_PY = """\ from setuptools import setup -setup(name='foo', py_modules=['hi']) +setup(py_modules=['hi']) """ @@ -52,7 +52,6 @@ class Test: dist = Distribution(dict( script_name='setup.py', script_args=['bdist_egg', '--exclude-source-files'], - name='foo', py_modules=['hi'], )) with contexts.quiet(): diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 0f4a1a73..36940e76 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -1,15 +1,32 @@ import os +import sys import shutil +import signal import tarfile import importlib +import contextlib from concurrent import futures import re +from zipfile import ZipFile import pytest from jaraco import path from .textwrap import DALS +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" + + +TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds +IS_PYPY = '__pypy__' in sys.builtin_module_names + + +pytestmark = pytest.mark.skipif( + sys.platform == "win32" and IS_PYPY, + reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor " + "is flaky and problematic" +) + class BuildBackendBase: def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'): @@ -31,10 +48,27 @@ class BuildBackend(BuildBackendBase): def method(*args, **kw): root = os.path.abspath(self.cwd) caller = BuildBackendCaller(root, self.env, self.backend_name) - return self.pool.submit(caller, name, *args, **kw).result() + pid = None + try: + pid = self.pool.submit(os.getpid).result(TIMEOUT) + return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT) + except futures.TimeoutError: + self.pool.shutdown(wait=False) # doesn't stop already running processes + self._kill(pid) + pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)") + except (futures.process.BrokenProcessPool, MemoryError, OSError): + if IS_PYPY: + pytest.xfail("PyPy frequently fails tests with ProcessPoolExector") + raise return method + def _kill(self, pid): + if pid is None: + return + with contextlib.suppress(ProcessLookupError, OSError): + os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL) + class BuildBackendCaller(BuildBackendBase): def __init__(self, *args, **kwargs): @@ -58,7 +92,7 @@ class BuildBackendCaller(BuildBackendBase): defns = [ - { + { # simple setup.py script 'setup.py': DALS(""" __import__('setuptools').setup( name='foo', @@ -72,7 +106,7 @@ defns = [ print('hello') """), }, - { + { # setup.py that relies on __name__ 'setup.py': DALS(""" assert __name__ == '__main__' __import__('setuptools').setup( @@ -87,7 +121,7 @@ defns = [ print('hello') """), }, - { + { # setup.py script that runs arbitrary code 'setup.py': DALS(""" variable = True def function(): @@ -105,7 +139,30 @@ defns = [ print('hello') """), }, - { + { # setup.py script that constructs temp files to be included in the distribution + 'setup.py': DALS(""" + # Some packages construct files on the fly, include them in the package, + # and immediately remove them after `setup()` (e.g. pybind11==2.9.1). + # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)` + # to obtain a distribution object first, and then run the distutils + # commands later, because these files will be removed in the meantime. 
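+            # In this example, 'world.py' plays that role: it is created just
+            # before `setup()` and removed again in the `finally` block below.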
+ + with open('world.py', 'w') as f: + f.write('x = 42') + + try: + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['world'], + setup_requires=['six'], + ) + finally: + # Some packages will clean temporary files + __import__('os').unlink('world.py') + """), + }, + { # setup.cfg only 'setup.cfg': DALS(""" [metadata] name = foo @@ -120,6 +177,22 @@ defns = [ print('hello') """) }, + { # setup.cfg and setup.py + 'setup.cfg': DALS(""" + [metadata] + name = foo + version = 0.0.0 + + [options] + py_modules=hello + setup_requires=six + """), + 'setup.py': "__import__('setuptools').setup()", + 'hello.py': DALS(""" + def run(): + print('hello') + """) + }, ] @@ -150,7 +223,20 @@ class TestBuildMetaBackend: os.makedirs(dist_dir) wheel_name = build_backend.build_wheel(dist_dir) - assert os.path.isfile(os.path.join(dist_dir, wheel_name)) + wheel_file = os.path.join(dist_dir, wheel_name) + assert os.path.isfile(wheel_file) + + # Temporary files should be removed + assert not os.path.isfile('world.py') + + with ZipFile(wheel_file) as zipfile: + wheel_contents = set(zipfile.namelist()) + + # Each one of the examples have a single module + # that should be included in the distribution + python_scripts = (f for f in wheel_contents if f.endswith('.py')) + modules = [f for f in python_scripts if not f.endswith('setup.py')] + assert len(modules) == 1 @pytest.mark.parametrize('build_type', ('wheel', 'sdist')) def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): @@ -199,6 +285,190 @@ class TestBuildMetaBackend: assert third_result == second_result assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0 + @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB]) + def test_build_with_pyproject_config(self, tmpdir, setup_script): + files = { + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "foo" + license = {text = "MIT"} + description = "This is a Python package" + dynamic = ["version", "readme"] + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers" + ] + urls = {Homepage = "http://github.com"} + dependencies = [ + "appdirs", + ] + + [project.optional-dependencies] + all = [ + "tomli>=1", + "pyscaffold>=4,<5", + 'importlib; python_version == "2.6"', + ] + + [project.scripts] + foo = "foo.cli:main" + + [tool.setuptools] + zip-safe = false + package-dir = {"" = "src"} + packages = {find = {where = ["src"]}} + license-files = ["LICENSE*"] + + [tool.setuptools.dynamic] + version = {attr = "foo.__version__"} + readme = {file = "README.rst"} + + [tool.distutils.sdist] + formats = "gztar" + + [tool.distutils.bdist_wheel] + universal = true + """), + "MANIFEST.in": DALS(""" + global-include *.py *.txt + global-exclude *.py[cod] + """), + "README.rst": "This is a ``README``", + "LICENSE.txt": "---- placeholder MIT license ----", + "src": { + "foo": { + "__init__.py": "__version__ = '0.1'", + "cli.py": "def main(): print('hello world')", + "data.txt": "def main(): print('hello world')", + } + } + } + if setup_script: + files["setup.py"] = setup_script + + build_backend = self.get_build_backend() + with tmpdir.as_cwd(): + path.build(files) + sdist_path = build_backend.build_sdist("temp") + wheel_file = build_backend.build_wheel("temp") + + with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar: + sdist_contents = set(tar.getnames()) + + with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as 
zipfile: + wheel_contents = set(zipfile.namelist()) + metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8") + license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8") + epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8") + + assert sdist_contents - {"foo-0.1/setup.py"} == { + 'foo-0.1', + 'foo-0.1/LICENSE.txt', + 'foo-0.1/MANIFEST.in', + 'foo-0.1/PKG-INFO', + 'foo-0.1/README.rst', + 'foo-0.1/pyproject.toml', + 'foo-0.1/setup.cfg', + 'foo-0.1/src', + 'foo-0.1/src/foo', + 'foo-0.1/src/foo/__init__.py', + 'foo-0.1/src/foo/cli.py', + 'foo-0.1/src/foo/data.txt', + 'foo-0.1/src/foo.egg-info', + 'foo-0.1/src/foo.egg-info/PKG-INFO', + 'foo-0.1/src/foo.egg-info/SOURCES.txt', + 'foo-0.1/src/foo.egg-info/dependency_links.txt', + 'foo-0.1/src/foo.egg-info/entry_points.txt', + 'foo-0.1/src/foo.egg-info/requires.txt', + 'foo-0.1/src/foo.egg-info/top_level.txt', + 'foo-0.1/src/foo.egg-info/not-zip-safe', + } + assert wheel_contents == { + "foo/__init__.py", + "foo/cli.py", + "foo/data.txt", # include_package_data defaults to True + "foo-0.1.dist-info/LICENSE.txt", + "foo-0.1.dist-info/METADATA", + "foo-0.1.dist-info/WHEEL", + "foo-0.1.dist-info/entry_points.txt", + "foo-0.1.dist-info/top_level.txt", + "foo-0.1.dist-info/RECORD", + } + assert license == "---- placeholder MIT license ----" + for line in ( + "Summary: This is a Python package", + "License: MIT", + "Classifier: Intended Audience :: Developers", + "Requires-Dist: appdirs", + "Requires-Dist: tomli (>=1) ; extra == 'all'", + "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'" + ): + assert line in metadata + + assert metadata.strip().endswith("This is a ``README``") + assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main" + + def test_static_metadata_in_pyproject_config(self, tmpdir): + # Make sure static metadata in pyproject.toml is not overwritten by setup.py + # as required by PEP 621 + files = { + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "foo" + description = "This is a Python package" + version = "42" + dependencies = ["six"] + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='bar', + version='13', + ) + """), + } + build_backend = self.get_build_backend() + with tmpdir.as_cwd(): + path.build(files) + sdist_path = build_backend.build_sdist("temp") + wheel_file = build_backend.build_wheel("temp") + + assert (tmpdir / "temp/foo-42.tar.gz").exists() + assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-42.tar.gz").exists() + assert not (tmpdir / "temp/foo-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists() + + with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar: + pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8") + members = tar.getnames() + assert "bar-13/PKG-INFO" not in members + + with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile: + metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8") + members = zipfile.namelist() + assert "bar-13.dist-info/METADATA" not in members + + for file in pkg_info, metadata: + for line in ("Name: foo", "Version: 42"): + assert line in 
file + for line in ("Name: bar", "Version: 13"): + assert line not in file + def test_build_sdist(self, build_backend): dist_dir = os.path.abspath('pip-sdist') os.makedirs(dist_dir) @@ -392,6 +662,30 @@ class TestBuildMetaBackend: assert expected == sorted(actual) + def test_setup_requires_with_auto_discovery(self, tmpdir_cwd): + # Make sure patches introduced to retrieve setup_requires don't accidentally + # activate auto-discovery and cause problems due to the incomplete set of + # attributes passed to MinimalDistribution + files = { + 'pyproject.toml': DALS(""" + [project] + name = "proj" + version = "42" + """), + "setup.py": DALS(""" + __import__('setuptools').setup( + setup_requires=["foo"], + py_modules = ["hello", "world"] + ) + """), + 'hello.py': "'hello'", + 'world.py': "'world'", + } + path.build(files) + build_backend = self.get_build_backend() + setup_requires = build_backend.get_requires_for_build_wheel() + assert setup_requires == ["wheel", "foo"] + def test_dont_install_setup_requires(self, tmpdir_cwd): files = { 'setup.py': DALS(""" diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index 78a31ac4..19c8b780 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -18,7 +18,6 @@ def test_directories_in_package_data_glob(tmpdir_cwd): script_name='setup.py', script_args=['build_py'], packages=[''], - name='foo', package_data={'': ['path/*']}, )) os.makedirs('path/subpath') @@ -40,7 +39,6 @@ def test_read_only(tmpdir_cwd): script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['data.dat']}, - name='pkg', )) os.makedirs('pkg') open('pkg/__init__.py', 'w').close() @@ -70,7 +68,6 @@ def test_executable_data(tmpdir_cwd): script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['run-me']}, - name='pkg', )) os.makedirs('pkg') open('pkg/__init__.py', 'w').close() diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py new file mode 100644 index 00000000..fac365f4 --- /dev/null +++ b/setuptools/tests/test_config_discovery.py @@ -0,0 +1,581 @@ +import os +import sys +from configparser import ConfigParser +from itertools import product + +from setuptools.command.sdist import sdist +from setuptools.dist import Distribution +from setuptools.discovery import find_package_path, find_parent_package +from setuptools.errors import PackageDiscoveryError + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +import pytest +import jaraco.path +from path import Path as _Path + +from .contexts import quiet +from .integration.helpers import get_sdist_members, get_wheel_members, run +from .textwrap import DALS + + +class TestFindParentPackage: + def test_single_package(self, tmp_path): + # find_parent_package should find a non-namespace parent package + (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True) + (tmp_path / "src/namespace/pkg/nested/__init__.py").touch() + (tmp_path / "src/namespace/pkg/__init__.py").touch() + packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"] + assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg" + + def test_multiple_toplevel(self, tmp_path): + # find_parent_package should return null if the given list of packages does not + # have a single parent package + multiple = ["pkg", "pkg1", "pkg2"] + for name in multiple: + (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True) + (tmp_path / f"src/{name}/__init__.py").touch() + assert 
find_parent_package(multiple, {"": "src"}, tmp_path) is None + + +class TestDiscoverPackagesAndPyModules: + """Make sure discovered values for ``packages`` and ``py_modules`` work + similarly to explicit configuration for the simple scenarios. + """ + OPTIONS = { + # Different options according to the circumstance being tested + "explicit-src": { + "package_dir": {"": "src"}, + "packages": ["pkg"] + }, + "variation-lib": { + "package_dir": {"": "lib"}, # variation of the source-layout + }, + "explicit-flat": { + "packages": ["pkg"] + }, + "explicit-single_module": { + "py_modules": ["pkg"] + }, + "explicit-namespace": { + "packages": ["ns", "ns.pkg"] + }, + "automatic-src": {}, + "automatic-flat": {}, + "automatic-single_module": {}, + "automatic-namespace": {} + } + FILES = { + "src": ["src/pkg/__init__.py", "src/pkg/main.py"], + "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"], + "flat": ["pkg/__init__.py", "pkg/main.py"], + "single_module": ["pkg.py"], + "namespace": ["ns/pkg/__init__.py"] + } + + def _get_info(self, circumstance): + _, _, layout = circumstance.partition("-") + files = self.FILES[layout] + options = self.OPTIONS[circumstance] + return files, options + + @pytest.mark.parametrize("circumstance", OPTIONS.keys()) + def test_sdist_filelist(self, tmp_path, circumstance): + files, options = self._get_info(circumstance) + _populate_project_dir(tmp_path, files, options) + + _, cmd = _run_sdist_programatically(tmp_path, options) + + manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files] + for file in files: + assert any(f.endswith(file) for f in manifest) + + @pytest.mark.parametrize("circumstance", OPTIONS.keys()) + def test_project(self, tmp_path, circumstance): + files, options = self._get_info(circumstance) + _populate_project_dir(tmp_path, files, options) + + # Simulate a pre-existing `build` directory + (tmp_path / "build").mkdir() + (tmp_path / "build/lib").mkdir() + (tmp_path / "build/bdist.linux-x86_64").mkdir() + (tmp_path / "build/bdist.linux-x86_64/file.py").touch() + (tmp_path / "build/lib/__init__.py").touch() + (tmp_path / "build/lib/file.py").touch() + (tmp_path / "dist").mkdir() + (tmp_path / "dist/file.py").touch() + + _run_build(tmp_path) + + sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz"))) + print("~~~~~ sdist_members ~~~~~") + print('\n'.join(sdist_files)) + assert sdist_files >= set(files) + + wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl"))) + print("~~~~~ wheel_members ~~~~~") + print('\n'.join(wheel_files)) + orig_files = {f.replace("src/", "").replace("lib/", "") for f in files} + assert wheel_files >= orig_files + + # Make sure build files are not included by mistake + for file in wheel_files: + assert "build" not in files + assert "dist" not in files + + PURPOSEFULLY_EMPY = { + "setup.cfg": DALS( + """ + [metadata] + name = myproj + version = 0.0.0 + + [options] + {param} = + """ + ), + "setup.py": DALS( + """ + __import__('setuptools').setup( + name="myproj", + version="0.0.0", + {param}=[] + ) + """ + ), + "pyproject.toml": DALS( + """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + + [project] + name = "myproj" + version = "0.0.0" + + [tool.setuptools] + {param} = [] + """ + ), + "template-pyproject.toml": DALS( + """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + """ + ) + } + + @pytest.mark.parametrize( + "config_file, param, circumstance", + product( + ["setup.cfg", "setup.py", "pyproject.toml"], + ["packages", "py_modules"], + FILES.keys() 
+ ) + ) + def test_purposefully_empty(self, tmp_path, config_file, param, circumstance): + files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"] + _populate_project_dir(tmp_path, files, {}) + + if config_file == "pyproject.toml": + template_param = param.replace("_", "-") + else: + # Make sure build works with or without setup.cfg + pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"] + (tmp_path / "pyproject.toml").write_text(pyproject) + template_param = param + + config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param) + (tmp_path / config_file).write_text(config) + + dist = _get_dist(tmp_path, {}) + # When either parameter package or py_modules is an empty list, + # then there should be no discovery + assert getattr(dist, param) == [] + other = {"py_modules": "packages", "packages": "py_modules"}[param] + assert getattr(dist, other) is None + + @pytest.mark.parametrize( + "extra_files, pkgs", + [ + (["venv/bin/simulate_venv"], {"pkg"}), + (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}), + (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}), + ( + # Type stubs can also be namespaced + ["namespace-stubs/pkg/__init__.pyi"], + {"pkg", "namespace-stubs", "namespace-stubs.pkg"}, + ), + ( + # Just the top-level package can have `-stubs`, ignore nested ones + ["namespace-stubs/pkg-stubs/__init__.pyi"], + {"pkg", "namespace-stubs"} + ), + (["_hidden/file.py"], {"pkg"}), + (["news/finalize.py"], {"pkg"}), + ] + ) + def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.packages) == pkgs + + @pytest.mark.parametrize( + "extra_files", + [ + ["other/__init__.py"], + ["other/finalize.py"], + ] + ) + def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + def test_flat_layout_with_single_module(self, tmp_path): + files = self.FILES["single_module"] + ["invalid-module-name.py"] + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.py_modules) == {"pkg"} + + def test_flat_layout_with_multiple_modules(self, tmp_path): + files = self.FILES["single_module"] + ["valid_module_name.py"] + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestNoConfig: + DEFAULT_VERSION = "0.0.0" # Default version given by setuptools + + EXAMPLES = { + "pkg1": ["src/pkg1.py"], + "pkg2": ["src/pkg2/__init__.py"], + "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"], + "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"], + "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"], + "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"], + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_discover_name(self, tmp_path, example): + _populate_project_dir(tmp_path, self.EXAMPLES[example], {}) + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == example + + def test_build_with_discovered_name(self, tmp_path): + files = ["src/ns/nested/pkg/__init__.py"] + _populate_project_dir(tmp_path, files, {}) + _run_build(tmp_path, "--sdist") + # Expected distribution file + dist_file = tmp_path / 
f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz" + assert dist_file.is_file() + + +class TestWithAttrDirective: + @pytest.mark.parametrize( + "folder, opts", + [ + ("src", {}), + ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}), + ] + ) + def test_setupcfg_metadata(self, tmp_path, folder, opts): + files = [f"{folder}/pkg/__init__.py", "setup.cfg"] + _populate_project_dir(tmp_path, files, opts) + (tmp_path / folder / "pkg/__init__.py").write_text("version = 42") + (tmp_path / "setup.cfg").write_text( + "[metadata]\nversion = attr: pkg.version\n" + + (tmp_path / "setup.cfg").read_text() + ) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "pkg" + assert dist.get_version() == "42" + assert dist.package_dir + package_path = find_package_path("pkg", dist.package_dir, tmp_path) + assert os.path.exists(package_path) + assert folder in _Path(package_path).parts() + + _run_build(tmp_path, "--sdist") + dist_file = tmp_path / "dist/pkg-42.tar.gz" + assert dist_file.is_file() + + def test_pyproject_metadata(self, tmp_path): + _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {}) + (tmp_path / "src/pkg/__init__.py").write_text("version = 42") + (tmp_path / "pyproject.toml").write_text( + "[project]\nname = 'pkg'\ndynamic = ['version']\n" + "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n" + ) + dist = _get_dist(tmp_path, {}) + assert dist.get_version() == "42" + assert dist.package_dir == {"": "src"} + + +class TestWithCExtension: + def _simulate_package_with_extension(self, tmp_path): + # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0 + files = [ + "benchmarks/file.py", + "docs/Makefile", + "docs/requirements.txt", + "docs/source/conf.py", + "proj/header.h", + "proj/file.py", + "py/proj.cpp", + "py/other.cpp", + "py/file.py", + "py/py.typed", + "py/tests/test_proj.py", + "README.rst", + ] + _populate_project_dir(tmp_path, files, {}) + + setup_script = """ + from setuptools import Extension, setup + + ext_modules = [ + Extension( + "proj", + ["py/proj.cpp", "py/other.cpp"], + include_dirs=["."], + language="c++", + ), + ] + setup(ext_modules=ext_modules) + """ + (tmp_path / "setup.py").write_text(DALS(setup_script)) + + def test_skip_discovery_with_setupcfg_metadata(self, tmp_path): + """Ensure that auto-discovery is not triggered when the project is based on + C-extensions only, for backward compatibility. + """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + + setupcfg = """ + [metadata] + name = proj + version = 42 + """ + (tmp_path / "setup.cfg").write_text(DALS(setupcfg)) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "proj" + assert dist.get_version() == "42" + assert dist.py_modules is None + assert dist.packages is None + assert len(dist.ext_modules) == 1 + assert dist.ext_modules[0].name == "proj" + + def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path): + """When opting-in to pyproject.toml metadata, auto-discovery will be active if + the package lists C-extensions, but does not configure py-modules or packages. + + This way we ensure users with complex package layouts that would lead to the + discovery of multiple top-level modules/packages see errors and are forced to + explicitly set ``packages`` or ``py-modules``. 
+ """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [project] + name = 'proj' + version = '42' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestWithPackageData: + def _simulate_package_with_data_files(self, tmp_path, src_root): + files = [ + f"{src_root}/proj/__init__.py", + f"{src_root}/proj/file1.txt", + f"{src_root}/proj/nested/file2.txt", + ] + _populate_project_dir(tmp_path, files, {}) + + manifest = """ + global-include *.py *.txt + """ + (tmp_path / "MANIFEST.in").write_text(DALS(manifest)) + + EXAMPLE_SETUPCFG = """ + [metadata] + name = proj + version = 42 + + [options] + include_package_data = True + """ + EXAMPLE_PYPROJECT = """ + [project] + name = "proj" + version = "42" + """ + + PYPROJECT_PACKAGE_DIR = """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + + @pytest.mark.parametrize( + "src_root, files", + [ + (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ( + "src", + { + "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS( + """ + packages = find: + package_dir = + =src + + [options.packages.find] + where = src + """ + ) + } + ), + ( + "src", + { + "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS( + """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + ) + }, + ), + ] + ) + def test_include_package_data(self, tmp_path, src_root, files): + """ + Make sure auto-discovery does not affect package include_package_data. + See issue #3196. + """ + jaraco.path.build(files, prefix=str(tmp_path)) + self._simulate_package_with_data_files(tmp_path, src_root) + + expected = { + os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"), + os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"), + } + + _run_build(tmp_path) + + sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz"))) + print("~~~~~ sdist_members ~~~~~") + print('\n'.join(sdist_files)) + assert sdist_files >= expected + + wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl"))) + print("~~~~~ wheel_members ~~~~~") + print('\n'.join(wheel_files)) + orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected} + assert wheel_files >= orig_files + + +def test_compatible_with_numpy_configuration(tmp_path): + files = [ + "dir1/__init__.py", + "dir2/__init__.py", + "file.py", + ] + _populate_project_dir(tmp_path, files, {}) + dist = Distribution({}) + dist.configuration = object() + dist.set_defaults() + assert dist.py_modules is None + assert dist.packages is None + + +def _populate_project_dir(root, files, options): + # NOTE: Currently pypa/build will refuse to build the project if no + # `pyproject.toml` or `setup.py` is found. So it is impossible to do + # completely "config-less" projects. 
+ (root / "setup.py").write_text("import setuptools\nsetuptools.setup()") + (root / "README.md").write_text("# Example Package") + (root / "LICENSE").write_text("Copyright (c) 2018") + _write_setupcfg(root, options) + paths = (root / f for f in files) + for path in paths: + path.parent.mkdir(exist_ok=True, parents=True) + path.touch() + + +def _write_setupcfg(root, options): + if not options: + print("~~~~~ **NO** setup.cfg ~~~~~") + return + setupcfg = ConfigParser() + setupcfg.add_section("options") + for key, value in options.items(): + if key == "packages.find": + setupcfg.add_section(f"options.{key}") + setupcfg[f"options.{key}"].update(value) + elif isinstance(value, list): + setupcfg["options"][key] = ", ".join(value) + elif isinstance(value, dict): + str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items()) + setupcfg["options"][key] = "\n" + str_value + else: + setupcfg["options"][key] = str(value) + with open(root / "setup.cfg", "w") as f: + setupcfg.write(f) + print("~~~~~ setup.cfg ~~~~~") + print((root / "setup.cfg").read_text()) + + +def _run_build(path, *flags): + cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)] + return run(cmd, env={'DISTUTILS_DEBUG': ''}) + + +def _get_dist(dist_path, attrs): + root = "/".join(os.path.split(dist_path)) # POSIX-style + + script = dist_path / 'setup.py' + if script.exists(): + with _Path(dist_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="init") + else: + dist = Distribution(attrs) + + dist.src_root = root + dist.script_name = "setup.py" + with _Path(dist_path): + dist.parse_config_files() + + dist.set_defaults() + return dist + + +def _run_sdist_programatically(dist_path, attrs): + dist = _get_dist(dist_path, attrs) + cmd = sdist(dist) + cmd.ensure_finalized() + assert cmd.distribution.packages or cmd.distribution.py_modules + + with quiet(), _Path(dist_path): + cmd.run() + + return dist, cmd diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index 70c5794c..c52072ac 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -6,11 +6,11 @@ import sys import subprocess import platform import pathlib -import textwrap from setuptools.command import test import pytest +import pip_run.launch from setuptools.command.develop import develop from setuptools.dist import Distribution @@ -166,21 +166,6 @@ class TestNamespaces: with test.test.paths_on_pythonpath([str(target)]): subprocess.check_call(pkg_resources_imp) - @staticmethod - def install_workaround(site_packages): - site_packages.mkdir(parents=True) - sc = site_packages / 'sitecustomize.py' - sc.write_text( - textwrap.dedent( - """ - import site - import pathlib - here = pathlib.Path(__file__).parent - site.addsitedir(str(here)) - """ - ).lstrip() - ) - @pytest.mark.xfail( platform.python_implementation() == 'PyPy', reason="Workaround fails on PyPy (why?)", @@ -190,7 +175,6 @@ class TestNamespaces: Editable install to a prefix should be discoverable. 
""" prefix = tmp_path / 'prefix' - prefix.mkdir() # figure out where pip will likely install the package site_packages = prefix / next( @@ -198,9 +182,10 @@ class TestNamespaces: for path in sys.path if 'site-packages' in path and path.startswith(sys.prefix) ) + site_packages.mkdir(parents=True) - # install the workaround - self.install_workaround(site_packages) + # install workaround + pip_run.launch.inject_sitecustomize(str(site_packages)) env = dict(os.environ, PYTHONPATH=str(site_packages)) cmd = [ @@ -219,6 +204,6 @@ class TestNamespaces: # now run 'sample' with the prefix on the PYTHONPATH bin = 'Scripts' if platform.system() == 'Windows' else 'bin' exe = prefix / bin / 'sample' - if sys.version_info < (3, 7) and platform.system() == 'Windows': + if sys.version_info < (3, 8) and platform.system() == 'Windows': exe = str(exe) subprocess.check_call([exe], env=env) diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index c4279f0b..e7d2f5ca 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -2,6 +2,7 @@ import io import collections import re import functools +import os import urllib.request import urllib.parse from distutils.errors import DistutilsSetupError @@ -18,6 +19,7 @@ from setuptools import Distribution from .textwrap import DALS from .test_easy_install import make_nspkg_sdist +from .test_find_packages import ensure_files import pytest @@ -69,16 +71,19 @@ def test_dist__get_unpatched_deprecated(): pytest.warns(DistDeprecationWarning, _get_unpatched, [""]) +EXAMPLE_BASE_INFO = dict( + name="package", + version="0.0.1", + author="Foo Bar", + author_email="foo@bar.net", + long_description="Long\ndescription", + description="Short description", + keywords=["one", "two"], +) + + def __read_test_cases(): - base = dict( - name="package", - version="0.0.1", - author="Foo Bar", - author_email="foo@bar.net", - long_description="Long\ndescription", - description="Short description", - keywords=["one", "two"], - ) + base = EXAMPLE_BASE_INFO params = functools.partial(dict, base) @@ -374,3 +379,131 @@ def test_check_specifier(): ) def test_rfc822_unescape(content, result): assert (result or content) == rfc822_unescape(rfc822_escape(content)) + + +def test_metadata_name(): + with pytest.raises(DistutilsSetupError, match='missing.*name'): + Distribution()._validate_metadata() + + +@pytest.mark.parametrize( + "dist_name, py_module", + [ + ("my.pkg", "my_pkg"), + ("my-pkg", "my_pkg"), + ("my_pkg", "my_pkg"), + ("pkg", "pkg"), + ] +) +def test_dist_default_py_modules(tmp_path, dist_name, py_module): + (tmp_path / f"{py_module}.py").touch() + + (tmp_path / "setup.py").touch() + (tmp_path / "noxfile.py").touch() + # ^-- make sure common tool files are ignored + + attrs = { + **EXAMPLE_BASE_INFO, + "name": dist_name, + "src_root": str(tmp_path) + } + # Find `py_modules` corresponding to dist_name if not given + dist = Distribution(attrs) + dist.set_defaults() + assert dist.py_modules == [py_module] + # When `py_modules` is given, don't do anything + dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]}) + dist.set_defaults() + assert dist.py_modules == ["explicity_py_module"] + # When `packages` is given, don't do anything + dist = Distribution({**attrs, "packages": ["explicity_package"]}) + dist.set_defaults() + assert not dist.py_modules + + +@pytest.mark.parametrize( + "dist_name, package_dir, package_files, packages", + [ + ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my-pkg", None, 
["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]), + ( + "my_pkg", + None, + ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"], + ["my_pkg", "my_pkg2"] + ), + ( + "my_pkg", + {"pkg": "lib", "pkg2": "lib2"}, + ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"], + ["pkg", "pkg.nested", "pkg2"] + ), + ] +) +def test_dist_default_packages( + tmp_path, dist_name, package_dir, package_files, packages +): + ensure_files(tmp_path, package_files) + + (tmp_path / "setup.py").touch() + (tmp_path / "noxfile.py").touch() + # ^-- should not be included by default + + attrs = { + **EXAMPLE_BASE_INFO, + "name": dist_name, + "src_root": str(tmp_path), + "package_dir": package_dir + } + # Find `packages` either corresponding to dist_name or inside src + dist = Distribution(attrs) + dist.set_defaults() + assert not dist.py_modules + assert not dist.py_modules + assert set(dist.packages) == set(packages) + # When `py_modules` is given, don't do anything + dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]}) + dist.set_defaults() + assert not dist.packages + assert set(dist.py_modules) == {"explicit_py_module"} + # When `packages` is given, don't do anything + dist = Distribution({**attrs, "packages": ["explicit_package"]}) + dist.set_defaults() + assert not dist.py_modules + assert set(dist.packages) == {"explicit_package"} + + +@pytest.mark.parametrize( + "dist_name, package_dir, package_files", + [ + ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]), + ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]), + ("my_pkg", None, ["my_pkg.py"]), + ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]), + ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]), + ( + "my_pkg", + {"my_pkg": "lib", "my_pkg.lib2": "lib2"}, + ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"], + ), + # Should not try to guess a name from multiple py_modules/packages + ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]), + ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]), + ] +) +def test_dist_default_name(tmp_path, dist_name, package_dir, package_files): + """Make sure dist.name is discovered from packages/py_modules""" + ensure_files(tmp_path, package_files) + attrs = { + **EXAMPLE_BASE_INFO, + "src_root": "/".join(os.path.split(tmp_path)), # POSIX-style + "package_dir": package_dir + } + del attrs["name"] + + dist = Distribution(attrs) + dist.set_defaults() + assert dist.py_modules or dist.packages + assert dist.get_name() == dist_name diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 29fbd09d..813ef51d 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -1,12 +1,21 @@ """Test .dist-info style distributions. 
""" +import pathlib +import re +import subprocess +import sys +from functools import partial import pytest import pkg_resources +from setuptools.archive_util import unpack_archive from .textwrap import DALS +read = partial(pathlib.Path.read_text, encoding="utf-8") + + class TestDistInfo: metadata_base = DALS(""" @@ -72,3 +81,78 @@ class TestDistInfo: pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'), ] assert d.extras == ['baz'] + + def test_invalid_version(self, tmp_path): + config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n" + (tmp_path / "setup.cfg").write_text(config, encoding="utf-8") + msg = re.compile("invalid version", re.M | re.I) + output = run_command("dist_info", cwd=tmp_path) + assert msg.search(output) + dist_info = next(tmp_path.glob("*.dist-info")) + assert dist_info.name.startswith("proj-42") + + +class TestWheelCompatibility: + """Make sure the .dist-info directory produced with the ``dist_info`` command + is the same as the one produced by ``bdist_wheel``. + """ + SETUPCFG = DALS(""" + [metadata] + name = {name} + version = {version} + + [options] + install_requires = foo>=12; sys_platform != "linux" + + [options.extras_require] + test = pytest + + [options.entry_points] + console_scripts = + executable-name = my_package.module:function + discover = + myproj = my_package.other_module:function + """) + + EGG_INFO_OPTS = [ + # Related: #3088 #2872 + ("", ""), + (".post", "[egg_info]\ntag_build = post\n"), + (".post", "[egg_info]\ntag_build = .post\n"), + (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"), + (".dev", "[egg_info]\ntag_build = .dev\n"), + (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"), + ("a1", "[egg_info]\ntag_build = .a1\n"), + ("+local", "[egg_info]\ntag_build = +local\n"), + ] + + @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split()) + @pytest.mark.parametrize("version", ["0.42.13"]) + @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS) + def test_dist_info_is_the_same_as_in_wheel( + self, name, version, tmp_path, suffix, cfg + ): + config = self.SETUPCFG.format(name=name, version=version) + cfg + + for i in "dir_wheel", "dir_dist": + (tmp_path / i).mkdir() + (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8") + + run_command("bdist_wheel", cwd=tmp_path / "dir_wheel") + wheel = next(tmp_path.glob("dir_wheel/dist/*.whl")) + unpack_archive(wheel, tmp_path / "unpack") + wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info")) + + run_command("dist_info", cwd=tmp_path / "dir_dist") + dist_info = next(tmp_path.glob("dir_dist/*.dist-info")) + + assert dist_info.name == wheel_dist_info.name + assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}") + for file in "METADATA", "entry_points.txt": + assert read(dist_info / file) == read(wheel_dist_info / file) + + +def run_command(*cmd, **kwargs): + opts = {"stderr": subprocess.STDOUT, "text": True, **kwargs} + cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *cmd] + return subprocess.check_output(cmd, **opts) diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index b6b9c00e..df8f3541 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -1,38 +1,15 @@ import os import sys import functools -import subprocess import platform +import textwrap import pytest -import jaraco.envs -import path IS_PYPY = '__pypy__' in sys.builtin_module_names -class VirtualEnv(jaraco.envs.VirtualEnv): 
- name = '.env' - # Some version of PyPy will import distutils on startup, implicitly - # importing setuptools, and thus leading to BackendInvalid errors - # when upgrading Setuptools. Bypass this behavior by avoiding the - # early availability and need to upgrade. - create_opts = ['--no-setuptools'] - - def run(self, cmd, *args, **kwargs): - cmd = [self.exe(cmd[0])] + cmd[1:] - return subprocess.check_output(cmd, *args, cwd=self.root, **kwargs) - - -@pytest.fixture -def venv(tmp_path, tmp_src): - env = VirtualEnv() - env.root = path.Path(tmp_path / 'venv') - env.req = str(tmp_src) - return env.create() - - def popen_text(call): """ Augment the Popen call with the parameters to ensure unicode text. @@ -41,12 +18,35 @@ def popen_text(call): if sys.version_info < (3, 7) else functools.partial(call, text=True) +def win_sr(env): + """ + On Windows, SYSTEMROOT must be present to avoid + + > Fatal Python error: _Py_HashRandomization_Init: failed to + > get random numbers to initialize Python + """ + if env is None: + return + if platform.system() == 'Windows': + env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] + return env + + def find_distutils(venv, imports='distutils', env=None, **kwargs): py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals()) cmd = ['python', '-c', py_cmd] - if platform.system() == 'Windows': - env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] - return popen_text(venv.run)(cmd, env=env, **kwargs) + return popen_text(venv.run)(cmd, env=win_sr(env), **kwargs) + + +def count_meta_path(venv, env=None): + py_cmd = textwrap.dedent( + """ + import sys + is_distutils = lambda finder: finder.__class__.__name__ == "DistutilsMetaFinder" + print(len(list(filter(is_distutils, sys.meta_path)))) + """) + cmd = ['python', '-c', py_cmd] + return int(popen_text(venv.run)(cmd, env=win_sr(env))) def test_distutils_stdlib(venv): @@ -55,6 +55,7 @@ def test_distutils_stdlib(venv): """ env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib') assert venv.name not in find_distutils(venv, env=env).split(os.sep) + assert count_meta_path(venv, env=env) == 0 def test_distutils_local_with_setuptools(venv): @@ -64,6 +65,7 @@ def test_distutils_local_with_setuptools(venv): env = dict(SETUPTOOLS_USE_DISTUTILS='local') loc = find_distutils(venv, imports='setuptools, distutils', env=env) assert venv.name in loc.split(os.sep) + assert count_meta_path(venv, env=env) <= 1 @pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup') @@ -74,3 +76,83 @@ def test_distutils_local(venv): """ env = dict(SETUPTOOLS_USE_DISTUTILS='local') assert venv.name in find_distutils(venv, env=env).split(os.sep) + assert count_meta_path(venv, env=env) <= 1 + + +def test_pip_import(venv): + """ + Ensure pip can be imported. + Regression test for #3002. + """ + cmd = ['python', '-c', 'import pip'] + popen_text(venv.run)(cmd) + + +def test_distutils_has_origin(): + """ + Distutils module spec should have an origin. #2990. + """ + assert __import__('distutils').__spec__.origin + + +ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r""" +# Depending on the importlib machinery and _distutils_hack, some imports are +# duplicated resulting in different module objects being loaded, which prevents +# patches as shown in #3042. +# This script provides a way of verifying if this duplication is happening. 
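+# The same submodule is reached twice: as an attribute of `distutils.cmd` /
+# `distutils.command.sdist` (which import it internally), and via a direct
+# `from distutils import ...` performed last to avoid caching; both lookups
+# must resolve to the same module object.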
+ +from distutils import cmd +import distutils.command.sdist as sdist + +# import last to prevent caching +from distutils import {imported_module} + +for mod in (cmd, sdist): + assert mod.{imported_module} == {imported_module}, ( + f"\n{{mod.dir_util}}\n!=\n{{{imported_module}}}" + ) + +print("success") +""" + + +@pytest.mark.parametrize( + "distutils_version, imported_module", + [ + ("stdlib", "dir_util"), + ("stdlib", "file_util"), + ("stdlib", "archive_util"), + ("local", "dir_util"), + ("local", "file_util"), + ("local", "archive_util"), + ] +) +def test_modules_are_not_duplicated_on_import( + distutils_version, imported_module, tmpdir_cwd, venv +): + env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) + script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module) + cmd = ['python', '-c', script] + output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + assert output == "success" + + +ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r""" +# Similar to ENSURE_IMPORTS_ARE_NOT_DUPLICATED +import distutils.dist as dist +from distutils import log + +assert dist.log == log, ( + f"\n{dist.log}\n!=\n{log}" +) + +print("success") +""" + + +@pytest.mark.parametrize("distutils_version", "local stdlib".split()) +def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv): + env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) + cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED] + output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + assert output == "success" diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index c7026852..64f2d631 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -14,6 +14,10 @@ import zipfile import mock import time import re +import subprocess +import pathlib +import warnings +import collections import pytest @@ -55,7 +59,7 @@ class FakeDist: SETUP_PY = DALS(""" from setuptools import setup - setup(name='foo') + setup() """) @@ -365,6 +369,63 @@ def mock_index(): return p_index +class TestInstallRequires: + def test_setup_install_includes_dependencies(self, tmp_path, mock_index): + """ + When ``python setup.py install`` is called directly, it will use easy_install + to fetch dependencies. 
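+
+        The project created below depends on a distribution that does not
+        exist, so the test only verifies that easy_install contacts the
+        (mocked) index and fails with a "not find suitable distribution" error.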
+ """ + # TODO: Remove these tests once `setup.py install` is completely removed + project_root = tmp_path / "project" + project_root.mkdir(exist_ok=True) + install_root = tmp_path / "install" + install_root.mkdir(exist_ok=True) + + self.create_project(project_root) + cmd = [ + sys.executable, + '-c', '__import__("setuptools").setup()', + 'install', + '--install-base', str(install_root), + '--install-lib', str(install_root), + '--install-headers', str(install_root), + '--install-scripts', str(install_root), + '--install-data', str(install_root), + '--install-purelib', str(install_root), + '--install-platlib', str(install_root), + ] + env = {"PYTHONPATH": str(install_root), "__EASYINSTALL_INDEX": mock_index.url} + with pytest.raises(subprocess.CalledProcessError) as exc_info: + subprocess.check_output( + cmd, cwd=str(project_root), env=env, stderr=subprocess.STDOUT, text=True + ) + try: + assert '/does-not-exist/' in {r.path for r in mock_index.requests} + assert next( + line + for line in exc_info.value.output.splitlines() + if "not find suitable distribution for" in line + and "does-not-exist" in line + ) + except Exception: + if "failed to get random numbers" in exc_info.value.output: + pytest.xfail(f"{sys.platform} failure - {exc_info.value.output}") + raise + + def create_project(self, root): + config = """ + [metadata] + name = project + version = 42 + + [options] + install_requires = does-not-exist + py_modules = mod + """ + (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8") + (root / 'mod.py').touch() + + def make_trivial_sdist(dist_path, distname, version): """ Create a simple sdist tarball at dist_path, containing just a simple @@ -597,3 +658,102 @@ class TestWindowsScriptWriter: hdr = hdr.rstrip('\n') # header should not start with an escaped quote assert not hdr.startswith('\\"') + + +VersionStub = collections.namedtuple( + "VersionStub", "major, minor, micro, releaselevel, serial") + + +def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch): + # In issue #3001, easy_install wrongly uses the `python3.1` directory + # when the interpreter is `python3.10` and the `--user` option is given. + # See pypa/setuptools#3001. + dist = Distribution() + cmd = dist.get_command_obj('easy_install') + cmd.args = ['ok'] + cmd.optimize = 0 + cmd.user = True + cmd.install_userbase = str(tmpdir) + cmd.install_usersite = None + install_cmd = dist.get_command_obj('install') + install_cmd.install_userbase = str(tmpdir) + install_cmd.install_usersite = None + + with monkeypatch.context() as patch, warnings.catch_warnings(): + warnings.simplefilter("ignore") + version = '3.10.1 (main, Dec 21 2021, 09:17:12) [GCC 10.2.1 20210110]' + info = VersionStub(3, 10, 1, "final", 0) + patch.setattr('site.ENABLE_USER_SITE', True) + patch.setattr('sys.version', version) + patch.setattr('sys.version_info', info) + patch.setattr(cmd, 'create_home_path', mock.Mock()) + cmd.finalize_options() + + name = "pypy" if hasattr(sys, 'pypy_version_info') else "python" + install_dir = cmd.install_dir.lower() + + # In some platforms (e.g. Windows), install_dir is mostly determined + # via `sysconfig`, which define constants eagerly at module creation. + # This means that monkeypatching `sys.version` to emulate 3.10 for testing + # may have no effect. + # The safest test here is to rely on the fact that 3.1 is no longer + # supported/tested, and make sure that if 'python3.1' ever appears in the string + # it is followed by another digit (e.g. 'python3.10'). 
+ if re.search(name + r'3\.?1', install_dir): + assert re.search(name + r'3\.?1\d', install_dir) + + # The following "variables" are used for interpolation in distutils + # installation schemes, so it should be fair to treat them as "semi-public", + # or at least public enough so we can have a test to make sure they are correct + assert cmd.config_vars['py_version'] == '3.10.1' + assert cmd.config_vars['py_version_short'] == '3.10' + assert cmd.config_vars['py_version_nodot'] == '310' + + +def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path): + ''' `setup.py develop` should honor `--user` even under build isolation''' + + # == Arrange == + # Pretend that build isolation was enabled + # e.g pip sets the environment varible PYTHONNOUSERSITE=1 + monkeypatch.setattr('site.ENABLE_USER_SITE', False) + + # Patching $HOME for 2 reasons: + # 1. setuptools/command/easy_install.py:create_home_path + # tries creating directories in $HOME + # given `self.config_vars['DESTDIRS'] = "/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload"`` # noqa: E501 + # it will `makedirs("/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")`` # noqa: E501 + # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE + # To point inside our new home + monkeypatch.setenv('HOME', str(tmp_path / '.home')) + monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home')) + monkeypatch.setenv('APPDATA', str(tmp_path / '.home')) + monkeypatch.setattr('site.USER_BASE', None) + monkeypatch.setattr('site.USER_SITE', None) + user_site = pathlib.Path(site.getusersitepackages()) + user_site.mkdir(parents=True, exist_ok=True) + + sys_prefix = (tmp_path / '.sys_prefix') + sys_prefix.mkdir(parents=True, exist_ok=True) + monkeypatch.setattr('sys.prefix', str(sys_prefix)) + + setup_script = ( + "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n" + ) + (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8") + + # == Sanity check == + assert list(sys_prefix.glob("*")) == [] + assert list(user_site.glob("*")) == [] + + # == Act == + run_setup('setup.py', ['develop', '--user']) + + # == Assert == + # Should not install to sys.prefix + assert list(sys_prefix.glob("*")) == [] + # Should install to user site + installed = {f.name for f in user_site.glob("*")} + # sometimes easy-install.pth is created and sometimes not + installed = installed - {"easy-install.pth"} + assert installed == {'aproj.egg-link'} diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py new file mode 100644 index 00000000..aac4f5ee --- /dev/null +++ b/setuptools/tests/test_editable_install.py @@ -0,0 +1,113 @@ +import subprocess +from textwrap import dedent + +import pytest +import jaraco.envs +import path + + +@pytest.fixture +def venv(tmp_path, setuptools_wheel): + env = jaraco.envs.VirtualEnv() + vars(env).update( + root=path.Path(tmp_path), # workaround for error on windows + name=".venv", + create_opts=["--no-setuptools"], + req=str(setuptools_wheel), + ) + return env.create() + + +EXAMPLE = { + 'pyproject.toml': dedent("""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "mypkg" + version = "3.14159" + license = {text = "MIT"} + 
description = "This is a Python package" + dynamic = ["readme"] + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers" + ] + urls = {Homepage = "http://github.com"} + dependencies = ['importlib-metadata; python_version<"3.8"'] + + [tool.setuptools] + package-dir = {"" = "src"} + packages = {find = {where = ["src"]}} + license-files = ["LICENSE*"] + + [tool.setuptools.dynamic] + readme = {file = "README.rst"} + + [tool.distutils.egg_info] + tag-build = ".post0" + """), + "MANIFEST.in": dedent("""\ + global-include *.py *.txt + global-exclude *.py[cod] + """).strip(), + "README.rst": "This is a ``README``", + "LICENSE.txt": "---- placeholder MIT license ----", + "src": { + "mypkg": { + "__init__.py": dedent("""\ + import sys + + if sys.version_info[:2] >= (3, 8): + from importlib.metadata import PackageNotFoundError, version + else: + from importlib_metadata import PackageNotFoundError, version + + try: + __version__ = version(__name__) + except PackageNotFoundError: + __version__ = "unknown" + """), + "__main__.py": dedent("""\ + from importlib.resources import read_text + from . import __version__, __name__ as parent + from .mod import x + + data = read_text(parent, "data.txt") + print(__version__, data, x) + """), + "mod.py": "x = ''", + "data.txt": "Hello World", + } + } +} + + +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" +MISSING_SETUP_SCRIPT = pytest.param( + None, + marks=pytest.mark.xfail( + reason="Editable install is currently only supported with `setup.py`" + ) +) + + +@pytest.mark.parametrize("setup_script", [SETUP_SCRIPT_STUB, MISSING_SETUP_SCRIPT]) +def test_editable_with_pyproject(tmp_path, venv, setup_script): + project = tmp_path / "mypkg" + files = {**EXAMPLE, "setup.py": setup_script} + project.mkdir() + jaraco.path.build(files, prefix=project) + + cmd = [venv.exe(), "-m", "pip", "install", + "--no-build-isolation", # required to force current version of setuptools + "-e", str(project)] + print(str(subprocess.check_output(cmd), "utf-8")) + + cmd = [venv.exe(), "-m", "mypkg"] + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World" + + (project / "src/mypkg/data.txt").write_text("foobar") + (project / "src/mypkg/mod.py").write_text("x = 42") + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42" diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 906713f6..efcce924 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -1,4 +1,4 @@ -"""Tests for setuptools.find_packages().""" +"""Tests for automatic package discovery""" import os import sys import shutil @@ -9,6 +9,7 @@ import pytest from setuptools import find_packages from setuptools import find_namespace_packages +from setuptools.discovery import FlatLayoutPackageFinder # modeled after CPython's test.support.can_symlink @@ -178,3 +179,67 @@ class TestFindPackages: shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) packages = find_namespace_packages(self.dist_dir) self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + +class TestFlatLayoutPackageFinder: + EXAMPLES = { + "hidden-folders": ( + [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"], + ["pkg", "pkg.nested"] + ), + "private-packages": ( + ["_pkg/__init__.py", "pkg/_private/__init__.py"], + ["pkg", "pkg._private"] + ), + "invalid-name": ( + ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"], + [] + ), + 
"docs": ( + ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"], + ["pkg"] + ), + "tests": ( + ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"], + ["pkg"] + ), + "examples": ( + [ + "pkg/__init__.py", + "examples/__init__.py", + "examples/file.py" + "example/other_file.py", + # Sub-packages should always be fine + "pkg/example/__init__.py", + "pkg/examples/__init__.py", + ], + ["pkg", "pkg.examples", "pkg.example"] + ), + "tool-specific": ( + [ + "pkg/__init__.py", + "tasks/__init__.py", + "tasks/subpackage/__init__.py", + "fabfile/__init__.py", + "fabfile/subpackage/__init__.py", + # Sub-packages should always be fine + "pkg/tasks/__init__.py", + "pkg/fabfile/__init__.py", + ], + ["pkg", "pkg.tasks", "pkg.fabfile"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_directories_not_included(self, tmp_path, example): + files, expected_packages = self.EXAMPLES[example] + ensure_files(tmp_path, files) + found_packages = FlatLayoutPackageFinder.find(str(tmp_path)) + assert set(found_packages) == set(expected_packages) + + +def ensure_files(root_path, files): + for file in files: + path = root_path / file + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py new file mode 100644 index 00000000..4ef68801 --- /dev/null +++ b/setuptools/tests/test_find_py_modules.py @@ -0,0 +1,81 @@ +"""Tests for automatic discovery of modules""" +import os + +import pytest + +from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder + +from .test_find_packages import ensure_files, has_symlink + + +class TestModuleFinder: + def find(self, path, *args, **kwargs): + return set(ModuleFinder.find(str(path), *args, **kwargs)) + + EXAMPLES = { + # circumstance: (files, kwargs, expected_modules) + "simple_folder": ( + ["file.py", "other.py"], + {}, # kwargs + ["file", "other"], + ), + "exclude": ( + ["file.py", "other.py"], + {"exclude": ["f*"]}, + ["other"], + ), + "include": ( + ["file.py", "fole.py", "other.py"], + {"include": ["f*"], "exclude": ["fo*"]}, + ["file"], + ), + "invalid-name": ( + ["my-file.py", "other.file.py"], + {}, + [] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_finder(self, tmp_path, example): + files, kwargs, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path, **kwargs) == set(expected_modules) + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self, tmp_path): + src = "_myfiles/file.py" + ensure_files(tmp_path, [src]) + os.symlink(tmp_path / src, tmp_path / "link.py") + assert self.find(tmp_path) == {"link"} + + +class TestFlatLayoutModuleFinder: + def find(self, path, *args, **kwargs): + return set(FlatLayoutModuleFinder.find(str(path))) + + EXAMPLES = { + # circumstance: (files, expected_modules) + "hidden-files": ( + [".module.py"], + [] + ), + "private-modules": ( + ["_module.py"], + [] + ), + "common-names": ( + ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"], + ["mod"] + ), + "tool-specific": ( + ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"], + ["mod"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_files_not_included(self, tmp_path, example): + files, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path) == set(expected_modules) diff 
--git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py new file mode 100644 index 00000000..a5ddd56d --- /dev/null +++ b/setuptools/tests/test_logging.py @@ -0,0 +1,36 @@ +import logging + +import pytest + + +setup_py = """\ +from setuptools import setup + +setup( + name="test_logging", + version="0.0" +) +""" + + +@pytest.mark.parametrize( + "flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")] +) +def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): + """Make sure the correct verbosity level is set (issue #3038)""" + import setuptools # noqa: Import setuptools to monkeypatch distutils + import distutils # <- load distutils after all the patches take place + + logger = logging.Logger(__name__) + monkeypatch.setattr(logging, "root", logger) + unset_log_level = logger.getEffectiveLevel() + assert logging.getLevelName(unset_log_level) == "NOTSET" + + setup_script = tmp_path / "setup.py" + setup_script.write_text(setup_py) + dist = distutils.core.run_setup(setup_script, stop_after="init") + dist.script_args = [flag, "sdist"] + dist.parse_command_line() # <- where the log level is set + log_level = logger.getEffectiveLevel() + log_level_name = logging.getLevelName(log_level) + assert log_level_name == expected_level diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 66f46ad0..302cff73 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -10,7 +10,7 @@ from unittest import mock import pytest -import pkg_resources +from setuptools._importlib import metadata from setuptools import SetuptoolsDeprecationWarning from setuptools.command.sdist import sdist from setuptools.command.egg_info import manifest_maker @@ -529,7 +529,9 @@ def test_default_revctrl(): This interface must be maintained until Ubuntu 12.04 is no longer supported (by Setuptools). 
""" - ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' - ep = pkg_resources.EntryPoint.parse(ep_def) - res = ep.resolve() + ep, = metadata.EntryPoints._from_text(""" + [setuptools.file_finders] + svn_cvs = setuptools.command.sdist:_default_revctrl + """) + res = ep.load() assert hasattr(res, '__iter__') diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py index 42f8e18b..0640f49d 100644 --- a/setuptools/tests/test_setuptools.py +++ b/setuptools/tests/test_setuptools.py @@ -7,16 +7,23 @@ import distutils.cmd from distutils.errors import DistutilsOptionError from distutils.errors import DistutilsSetupError from distutils.core import Extension -from distutils.version import LooseVersion +from zipfile import ZipFile import pytest +from setuptools.extern.packaging import version + import setuptools import setuptools.dist import setuptools.depends as dep from setuptools.depends import Require +@pytest.fixture(autouse=True) +def isolated_dir(tmpdir_cwd): + yield + + def makeSetup(**args): """Return distribution from 'setup(**args)', without executing commands""" @@ -84,12 +91,12 @@ class TestDepends: assert req.name == 'Json' assert req.module == 'json' - assert req.requested_version == '1.0.3' + assert req.requested_version == version.Version('1.0.3') assert req.attribute == '__version__' assert req.full_name() == 'Json-1.0.3' from json import __version__ - assert req.get_version() == __version__ + assert str(req.get_version()) == __version__ assert req.version_ok('1.0.9') assert not req.version_ok('0.9.1') assert not req.version_ok('unknown') @@ -97,11 +104,6 @@ class TestDepends: assert req.is_present() assert req.is_current() - req = Require('Json 3000', '03000', 'json', format=LooseVersion) - assert req.is_present() - assert not req.is_current() - assert not req.version_ok('unknown') - req = Require('Do-what-I-mean', '1.0', 'd-w-i-m') assert not req.is_present() assert not req.is_current() @@ -293,3 +295,16 @@ def test_findall_missing_symlink(tmpdir, can_symlink): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == [] + + +def test_its_own_wheel_does_not_contain_tests(setuptools_wheel): + with ZipFile(setuptools_wheel) as zipfile: + contents = [f.replace(os.sep, '/') for f in zipfile.namelist()] + + for member in contents: + assert '/tests/' not in member + + +def test_convert_path_deprecated(): + with pytest.warns(setuptools.SetuptoolsDeprecationWarning): + setuptools.convert_path('setuptools/tests') diff --git a/setuptools/tests/test_sphinx_upload_docs.py b/setuptools/tests/test_sphinx_upload_docs.py deleted file mode 100644 index cc5b8293..00000000 --- a/setuptools/tests/test_sphinx_upload_docs.py +++ /dev/null @@ -1,38 +0,0 @@ -import pytest - -from jaraco import path - -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - - -@pytest.fixture -def sphinx_doc_sample_project(tmpdir_cwd): - path.build({ - 'setup.py': 'from setuptools import setup; setup()', - 'build': { - 'docs': { - 'conf.py': 'project="test"', - 'index.rst': ".. 
toctree::\ - :maxdepth: 2\ - :caption: Contents:", - }, - }, - }) - - -@pytest.mark.usefixtures('sphinx_doc_sample_project') -class TestSphinxUploadDocs: - def test_sphinx_doc(self): - params = dict( - name='foo', - packages=['test'], - ) - dist = Distribution(params) - - cmd = upload_docs(dist) - - cmd.initialize_options() - assert cmd.upload_dir is None - assert cmd.has_sphinx() is True - cmd.finalize_options() diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py index 8b8d9e6c..530474d7 100644 --- a/setuptools/tests/test_test.py +++ b/setuptools/tests/test_test.py @@ -10,7 +10,6 @@ from .textwrap import DALS @pytest.mark.usefixtures('tmpdir_cwd') def test_tests_are_run_once(capfd): params = dict( - name='foo', packages=['dummy'], ) files = { diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py deleted file mode 100644 index 55978aad..00000000 --- a/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -import zipfile -import contextlib - -import pytest -from jaraco import path - -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -from .textwrap import DALS -from . import contexts - - -@pytest.fixture -def sample_project(tmpdir_cwd): - path.build({ - 'setup.py': DALS(""" - from setuptools import setup - - setup(name='foo') - """), - 'build': { - 'index.html': 'Hello world.', - 'empty': {}, - } - }) - - -@pytest.mark.usefixtures('sample_project') -@pytest.mark.usefixtures('user_override') -class TestUploadDocsTest: - def test_create_zipfile(self): - """ - Ensure zipfile creation handles common cases, including a folder - containing an empty folder. - """ - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.target_dir = cmd.upload_dir = 'build' - with contexts.tempdir() as tmp_dir: - tmp_file = os.path.join(tmp_dir, 'foo.zip') - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file: - assert zip_file.namelist() == ['index.html'] - - def test_build_multipart(self): - data = dict( - a="foo", - b="bar", - file=('file.txt', b'content'), - ) - body, content_type = upload_docs._build_multipart(data) - assert 'form-data' in content_type - assert "b'" not in content_type - assert 'b"' not in content_type - assert isinstance(body, bytes) - assert b'foo' in body - assert b'content' in body diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index 00f5f185..65358543 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -1,139 +1,95 @@ -import glob import os import sys -import itertools +import subprocess +from urllib.request import urlopen +from urllib.error import URLError import pathlib import pytest -from pytest_fixture_config import yield_requires_config - -import pytest_virtualenv +from . import contexts from .textwrap import DALS from .test_easy_install import make_nspkg_sdist @pytest.fixture(autouse=True) -def pytest_virtualenv_works(virtualenv): +def pytest_virtualenv_works(venv): """ pytest_virtualenv may not work. if it doesn't, skip these tests. See #1284. 
""" - venv_prefix = virtualenv.run( - 'python -c "import sys; print(sys.prefix)"', - capture=True, - ).strip() + venv_prefix = venv.run(["python" , "-c", "import sys; print(sys.prefix)"]).strip() if venv_prefix == sys.prefix: pytest.skip("virtualenv is broken (see pypa/setuptools#1284)") -@yield_requires_config(pytest_virtualenv.CONFIG, ['virtualenv_executable']) -@pytest.fixture(scope='function') -def bare_virtualenv(): - """ Bare virtualenv (no pip/setuptools/wheel). - """ - with pytest_virtualenv.VirtualEnv(args=( - '--no-wheel', - '--no-pip', - '--no-setuptools', - )) as venv: - yield venv - - -def test_clean_env_install(bare_virtualenv, tmp_src): +def test_clean_env_install(venv_without_setuptools, setuptools_wheel): """ Check setuptools can be installed in a clean environment. """ - cmd = [bare_virtualenv.python, 'setup.py', 'install'] - bare_virtualenv.run(cmd, cd=tmp_src) - + cmd = ["python", "-m", "pip", "install", str(setuptools_wheel)] + venv_without_setuptools.run(cmd) -def _get_pip_versions(): - # This fixture will attempt to detect if tests are being run without - # network connectivity and if so skip some tests - network = True +def access_pypi(): + # Detect if tests are being run without connectivity if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover try: - from urllib.request import urlopen - from urllib.error import URLError - except ImportError: - from urllib2 import urlopen, URLError # Python 2.7 compat - - try: urlopen('https://pypi.org', timeout=1) except URLError: # No network, disable most of these tests - network = False + return False - def mark(param, *marks): - if not isinstance(param, type(pytest.param(''))): - param = pytest.param(param) - return param._replace(marks=param.marks + marks) + return True - def skip_network(param): - return param if network else mark(param, pytest.mark.skip(reason="no network")) - network_versions = [ - mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')), +@pytest.mark.skipif( + 'platform.python_implementation() == "PyPy"', + reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", +) +@pytest.mark.skipif(not access_pypi(), reason="no network") +# ^-- Even when it is not necessary to install a different version of `pip` +# the build process will still try to download `wheel`, see #3147 and #2986. +@pytest.mark.parametrize( + 'pip_version', + [ + None, + pytest.param('pip<20', marks=pytest.mark.xfail(reason='pypa/pip#6599')), 'pip<20.1', 'pip<21', 'pip<22', - mark( + pytest.param( 'https://github.com/pypa/pip/archive/main.zip', - pytest.mark.skipif('sys.version_info < (3, 7)'), + marks=pytest.mark.xfail(reason='#2975'), ), ] - - versions = itertools.chain( - [None], - map(skip_network, network_versions) - ) - - return list(versions) - - -@pytest.mark.skipif( - 'platform.python_implementation() == "PyPy"', - reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", ) -@pytest.mark.parametrize('pip_version', _get_pip_versions()) -def test_pip_upgrade_from_source(pip_version, tmp_src, virtualenv): +def test_pip_upgrade_from_source(pip_version, venv_without_setuptools, + setuptools_wheel, setuptools_sdist): """ Check pip can upgrade setuptools from source. 
""" - # Install pip/wheel, and remove setuptools (as it + # Install pip/wheel, in a venv without setuptools (as it # should not be needed for bootstraping from source) - if pip_version is None: - upgrade_pip = () - else: - upgrade_pip = ('python -m pip install -U "{pip_version}" --retries=1',) - virtualenv.run(' && '.join(( - 'pip uninstall -y setuptools', - 'pip install -U wheel', - ) + upgrade_pip).format(pip_version=pip_version)) - dist_dir = virtualenv.workspace - # Generate source distribution / wheel. - virtualenv.run(' && '.join(( - 'python setup.py -q sdist -d {dist}', - 'python setup.py -q bdist_wheel -d {dist}', - )).format(dist=dist_dir), cd=tmp_src) - sdist = glob.glob(os.path.join(dist_dir, '*.zip'))[0] - wheel = glob.glob(os.path.join(dist_dir, '*.whl'))[0] - # Then update from wheel. - virtualenv.run('pip install ' + wheel) + venv = venv_without_setuptools + venv.run(["pip", "install", "-U", "wheel"]) + if pip_version is not None: + venv.run(["python", "-m", "pip", "install", "-U", pip_version, "--retries=1"]) + with pytest.raises(subprocess.CalledProcessError): + # Meta-test to make sure setuptools is not installed + venv.run(["python", "-c", "import setuptools"]) + + # Then install from wheel. + venv.run(["pip", "install", str(setuptools_wheel)]) # And finally try to upgrade from source. - virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist) + venv.run(["pip", "install", "--no-cache-dir", "--upgrade", str(setuptools_sdist)]) -def _check_test_command_install_requirements(virtualenv, tmpdir, cwd): +def _check_test_command_install_requirements(venv, tmpdir): """ Check the test command will install all required dependencies. """ - # Install setuptools. - virtualenv.run('python setup.py develop', cd=cwd) - def sdist(distname, version): dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version)) make_nspkg_sdist(str(dist_path), distname, version) @@ -182,28 +138,24 @@ def _check_test_command_install_requirements(virtualenv, tmpdir, cwd): open('success', 'w').close() ''')) - # Run test command for test package. - # use 'virtualenv.python' as workaround for man-group/pytest-plugins#166 - cmd = [virtualenv.python, 'setup.py', 'test', '-s', 'test'] - virtualenv.run(cmd, cd=str(tmpdir)) + + cmd = ["python", 'setup.py', 'test', '-s', 'test'] + venv.run(cmd, cwd=str(tmpdir)) assert tmpdir.join('success').check() -def test_test_command_install_requirements(virtualenv, tmpdir, request): +def test_test_command_install_requirements(venv, tmpdir, tmpdir_cwd): # Ensure pip/wheel packages are installed. - virtualenv.run( - "python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"") - # uninstall setuptools so that 'setup.py develop' works - virtualenv.run("python -m pip uninstall -y setuptools") + venv.run(["python", "-c", "__import__('pkg_resources').require(['pip', 'wheel'])"]) # disable index URL so bits and bobs aren't requested from PyPI - virtualenv.env['PIP_NO_INDEX'] = '1' - _check_test_command_install_requirements(virtualenv, tmpdir, request.config.rootdir) + with contexts.environment(PYTHONPATH=None, PIP_NO_INDEX="1"): + _check_test_command_install_requirements(venv, tmpdir) -def test_no_missing_dependencies(bare_virtualenv, request): +def test_no_missing_dependencies(bare_venv, request): """ Quick and dirty test to ensure all external dependencies are vendored. 
""" + setuptools_dir = request.config.rootdir for command in ('upload',): # sorted(distutils.command.__all__): - cmd = [bare_virtualenv.python, 'setup.py', command, '-h'] - bare_virtualenv.run(cmd, cd=request.config.rootdir) + bare_venv.run(['python', 'setup.py', command, '-h'], cwd=setuptools_dir) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 7345b135..89d65d0b 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -6,6 +6,8 @@ from distutils.sysconfig import get_config_var from distutils.util import get_platform import contextlib +import pathlib +import stat import glob import inspect import os @@ -148,6 +150,7 @@ def _check_wheel_install(filename, install_dir, install_tree_includes, if requires_txt is None: assert not dist.has_metadata('requires.txt') else: + # Order must match to ensure reproducibility. assert requires_txt == dist.get_metadata('requires.txt').lstrip() @@ -420,6 +423,38 @@ WHEEL_INSTALL_TESTS = ( ), dict( + id='requires_ensure_order', + install_requires=''' + foo + bar + baz + qux + ''', + extras_require={ + 'extra': ''' + foobar>3 + barbaz>4 + bazqux>5 + quxzap>6 + ''', + }, + requires_txt=DALS( + ''' + foo + bar + baz + qux + + [extra] + foobar>3 + barbaz>4 + bazqux>5 + quxzap>6 + ''' + ), + ), + + dict( id='namespace_package', file_defs={ 'foo': { @@ -581,3 +616,88 @@ def test_wheel_is_compatible(monkeypatch): monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags) assert Wheel( 'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible() + + +def test_wheel_mode(): + @contextlib.contextmanager + def build_wheel(extra_file_defs=None, **kwargs): + file_defs = { + 'setup.py': (DALS( + ''' + # -*- coding: utf-8 -*- + from setuptools import setup + import setuptools + setup(**%r) + ''' + ) % kwargs).encode('utf-8'), + } + if extra_file_defs: + file_defs.update(extra_file_defs) + with tempdir() as source_dir: + path.build(file_defs, source_dir) + runsh = pathlib.Path(source_dir) / "script.sh" + os.chmod(runsh, 0o777) + subprocess.check_call((sys.executable, 'setup.py', + '-q', 'bdist_wheel'), cwd=source_dir) + yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0] + + params = dict( + id='script', + file_defs={ + 'script.py': DALS( + ''' + #/usr/bin/python + print('hello world!') + ''' + ), + 'script.sh': DALS( + ''' + #/bin/sh + echo 'hello world!' 
+ ''' + ), + }, + setup_kwargs=dict( + scripts=['script.py', 'script.sh'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + {'scripts': [ + 'script.py', + 'script.sh' + ]} + + ] + } + }) + ) + + project_name = params.get('name', 'foo') + version = params.get('version', '1.0') + install_tree = params.get('install_tree') + file_defs = params.get('file_defs', {}) + setup_kwargs = params.get('setup_kwargs', {}) + + with build_wheel( + name=project_name, + version=version, + install_requires=[], + extras_require={}, + extra_file_defs=file_defs, + **setup_kwargs + ) as filename, tempdir() as install_dir: + _check_wheel_install(filename, install_dir, + install_tree, project_name, + version, None) + w = Wheel(filename) + base = pathlib.Path(install_dir) / w.egg_name() + script_sh = base / "EGG-INFO" / "scripts" / "script.sh" + assert script_sh.exists() + if sys.platform != 'win32': + # Editable file mode has no effect on Windows + assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777" diff --git a/setuptools/wheel.py b/setuptools/wheel.py index 0be811af..0ced0ff2 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -15,6 +15,7 @@ from pkg_resources import parse_version from setuptools.extern.packaging.tags import sys_tags from setuptools.extern.packaging.utils import canonicalize_name from setuptools.command.egg_info import write_requirements +from setuptools.archive_util import _unpack_zipfile_obj WHEEL_NAME = re.compile( @@ -121,8 +122,7 @@ class Wheel: raise ValueError( 'unsupported wheel format version: %s' % wheel_version) # Extract to target directory. - os.mkdir(destination_eggdir) - zf.extractall(destination_eggdir) + _unpack_zipfile_obj(zf, destination_eggdir) # Convert metadata. dist_info = os.path.join(destination_eggdir, dist_info) dist = pkg_resources.Distribution.from_location( @@ -136,13 +136,13 @@ class Wheel: def raw_req(req): req.marker = None return str(req) - install_requires = list(sorted(map(raw_req, dist.requires()))) + install_requires = list(map(raw_req, dist.requires())) extras_require = { - extra: sorted( + extra: [ req for req in map(raw_req, dist.requires((extra,))) if req not in install_requires - ) + ] for extra in dist.extras } os.rename(dist_info, egg_info) diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py index cb977cff..1ca64fbb 100644 --- a/setuptools/windows_support.py +++ b/setuptools/windows_support.py @@ -1,5 +1,4 @@ import platform -import ctypes def windows_only(func): @@ -17,6 +16,7 @@ def hide_file(path): `path` must be text. 
""" + import ctypes __import__('ctypes.wintypes') SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD diff --git a/tools/finalize.py b/tools/finalize.py index 516a2fb5..5a4df5df 100644 --- a/tools/finalize.py +++ b/tools/finalize.py @@ -42,6 +42,7 @@ def update_changelog(): cmd = [ sys.executable, '-m', 'towncrier', + 'build', '--version', get_version(), '--yes', ] @@ -79,11 +80,18 @@ def check_changes(): """ allowed = 'deprecation', 'breaking', 'change', 'doc', 'misc' except_ = 'README.rst', '.gitignore' - assert all( - any(key in file.name for key in allowed) + news_fragments = ( + file for file in pathlib.Path('changelog.d').iterdir() if file.name not in except_ ) + unrecognized = [ + str(file) + for file in news_fragments + if not any(f".{key}" in file.suffixes for key in allowed) + ] + if unrecognized: + raise ValueError(f"Some news fragments have invalid names: {unrecognized}") if __name__ == '__main__': diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py new file mode 100644 index 00000000..201d1b70 --- /dev/null +++ b/tools/generate_validation_code.py @@ -0,0 +1,30 @@ +import subprocess +import sys + +from pathlib import Path + + +def generate_pyproject_validation(dest: Path): + """ + Generates validation code for ``pyproject.toml`` based on JSON schemas and the + ``validate-pyproject`` library. + """ + cmd = [ + sys.executable, + "-m", + "validate_pyproject.vendoring", + f"--output-dir={dest}", + "--enable-plugins", + "setuptools", + "distutils", + "--very-verbose" + ] + subprocess.check_call(cmd) + print(f"Validation code generated at: {dest}") + + +def main(): + generate_pyproject_validation(Path("setuptools/config/_validate_pyproject")) + + +__name__ == '__main__' and main() diff --git a/msvc-build-launcher-arm64.cmd b/tools/msvc-build-launcher-arm64.cmd index 8e63506b..8e63506b 100644 --- a/msvc-build-launcher-arm64.cmd +++ b/tools/msvc-build-launcher-arm64.cmd diff --git a/msvc-build-launcher.cmd b/tools/msvc-build-launcher.cmd index 92da290e..92da290e 100644 --- a/msvc-build-launcher.cmd +++ b/tools/msvc-build-launcher.cmd diff --git a/towncrier_template.rst b/tools/towncrier_template.rst index 7f507342..7f507342 100644 --- a/towncrier_template.rst +++ b/tools/towncrier_template.rst diff --git a/tools/vendored.py b/tools/vendored.py new file mode 100644 index 00000000..cd15adbf --- /dev/null +++ b/tools/vendored.py @@ -0,0 +1,149 @@ +import re +import sys +import subprocess + +from path import Path + + +def remove_all(paths): + for path in paths: + path.rmtree() if path.isdir() else path.remove() + + +def update_vendored(): + update_pkg_resources() + update_setuptools() + + +def rewrite_packaging(pkg_files, new_root): + """ + Rewrite imports in packaging to redirect to vendored copies. + """ + for file in pkg_files.glob('*.py'): + text = file.text() + text = re.sub(r' (pyparsing)', rf' {new_root}.\1', text) + text = text.replace( + 'from six.moves.urllib import parse', + 'from urllib import parse', + ) + file.write_text(text) + + +def rewrite_jaraco_text(pkg_files, new_root): + """ + Rewrite imports in jaraco.text to redirect to vendored copies. 
+ """ + for file in pkg_files.glob('*.py'): + text = file.read_text() + text = re.sub(r' (jaraco\.)', rf' {new_root}.\1', text) + text = re.sub(r' (importlib_resources)', rf' {new_root}.\1', text) + # suppress loading of lorem_ipsum; ref #3072 + text = re.sub(r'^lorem_ipsum.*\n$', '', text, flags=re.M) + file.write_text(text) + + +def rewrite_jaraco(pkg_files, new_root): + """ + Rewrite imports in jaraco.functools to redirect to vendored copies. + """ + for file in pkg_files.glob('*.py'): + text = file.read_text() + text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text) + file.write_text(text) + # required for zip-packaged setuptools #3084 + pkg_files.joinpath('__init__.py').write_text('') + + +def rewrite_importlib_resources(pkg_files, new_root): + """ + Rewrite imports in importlib_resources to redirect to vendored copies. + """ + for file in pkg_files.glob('*.py'): + text = file.read_text().replace('importlib_resources.abc', '.abc') + text = text.replace('zipp', '..zipp') + file.write_text(text) + + +def rewrite_importlib_metadata(pkg_files, new_root): + """ + Rewrite imports in importlib_metadata to redirect to vendored copies. + """ + for file in pkg_files.glob('*.py'): + text = file.read_text().replace('typing_extensions', '..typing_extensions') + text = text.replace('import zipp', 'from .. import zipp') + file.write_text(text) + + +def rewrite_more_itertools(pkg_files: Path): + """ + Defer import of concurrent.futures. Workaround for #3090. + """ + more_file = pkg_files.joinpath('more.py') + text = more_file.read_text() + text = re.sub(r'^.*concurrent.futures.*?\n', '', text, flags=re.MULTILINE) + text = re.sub( + 'ThreadPoolExecutor', + '__import__("concurrent.futures").futures.ThreadPoolExecutor', + text, + ) + more_file.write_text(text) + + +def rewrite_nspektr(pkg_files: Path, new_root): + for file in pkg_files.glob('*.py'): + text = file.read_text() + text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text) + text = re.sub(r' (jaraco\.\w+)', rf' {new_root}.\1', text) + text = re.sub(r' (packaging)', rf' {new_root}.\1', text) + text = re.sub(r' (importlib_metadata)', rf' {new_root}.\1', text) + file.write_text(text) + + +def clean(vendor): + """ + Remove all files out of the vendor directory except the meta + data (as pip uninstall doesn't support -t). 
+ """ + remove_all( + path + for path in vendor.glob('*') + if path.basename() != 'vendored.txt' + ) + + +def install(vendor): + clean(vendor) + install_args = [ + sys.executable, + '-m', 'pip', + 'install', + '-r', str(vendor / 'vendored.txt'), + '-t', str(vendor), + ] + subprocess.check_call(install_args) + (vendor / '__init__.py').write_text('') + + +def update_pkg_resources(): + vendor = Path('pkg_resources/_vendor') + install(vendor) + rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern') + rewrite_jaraco_text(vendor / 'jaraco/text', 'pkg_resources.extern') + rewrite_jaraco(vendor / 'jaraco', 'pkg_resources.extern') + rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern') + rewrite_more_itertools(vendor / "more_itertools") + + +def update_setuptools(): + vendor = Path('setuptools/_vendor') + install(vendor) + rewrite_packaging(vendor / 'packaging', 'setuptools.extern') + rewrite_jaraco_text(vendor / 'jaraco/text', 'setuptools.extern') + rewrite_jaraco(vendor / 'jaraco', 'setuptools.extern') + rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern') + rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern') + rewrite_more_itertools(vendor / "more_itertools") + rewrite_nspektr(vendor / "nspektr", 'setuptools.extern') + + +__name__ == '__main__' and update_vendored() @@ -7,18 +7,39 @@ toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = - # workaround for sphinx-doc/sphinx#9562 - # TODO: remove after Sphinx>4.1.2 is available. - sphinx@git+https://github.com/sphinx-doc/sphinx; python_version>="3.10" - # TODO: remove after man-group/pytest-plugins#188 is solved - pytest-virtualenv @ git+https://github.com/jaraco/pytest-plugins@distutils-deprecated#subdirectory=pytest-virtualenv + # Ideally all the dependencies should be set as "extras" commands = pytest {posargs} usedevelop = True extras = testing passenv = SETUPTOOLS_USE_DISTUTILS + PRE_BUILT_SETUPTOOLS_WHEEL + PRE_BUILT_SETUPTOOLS_SDIST + TIMEOUT_BACKEND_TEST # timeout (in seconds) for test_build_meta windir # required for test_pkg_resources + # honor git config in pytest-perf + HOME + # workaround for tox-dev/tox#2382 + PROGRAMDATA + PROGRAMFILES + PROGRAMFILES(x86) + +[testenv:integration] +deps = {[testenv]deps} +extras = testing-integration +passenv = + {[testenv]passenv} + DOWNLOAD_PATH + # workaround for tox-dev/tox#2382 + PROGRAMDATA + PROGRAMFILES + PROGRAMFILES(x86) +setenv = + PROJECT_ROOT = {toxinidir} +commands = + pytest --integration {posargs:-vv --durations=10 setuptools/tests/integration} + # use verbose mode by default to facilitate debugging from CI logs [testenv:docs] extras = @@ -37,6 +58,20 @@ passenv = * commands = python tools/finalize.py +[testenv:vendor] +skip_install = True +deps = + path +commands = + python -m tools.vendored + +[testenv:generate-validation-code] +skip_install = True +deps = + validate-pyproject[all]==0.7.1 +commands = + python -m tools.generate_validation_code + [testenv:release] skip_install = True deps = @@ -49,7 +84,6 @@ passenv = setenv = TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = - python -m bootstrap python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)" # unset tag_build and tag_date pypa/setuptools#2500 python setup.py egg_info -Db "" saveopts |